commit 4fede4590da44c2f779e6184eeccd530fe2ebf32 Author: Michael Wedl Date: Tue May 2 13:44:15 2023 +0200 Publish SysReptor community edition diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..6cbb4dd --- /dev/null +++ b/.dockerignore @@ -0,0 +1,29 @@ +.idea/ +.vscode/ + +.git/ +.gitignore +.gitlab-ci-yml +.gitkeep + +.env* +**/.env* +__pycache__ +**/__pycache__ +*.pyc +**/*.pyc +*.log +**/*.log +*.sql +**/*.sql +*.egg-info +**/*.egg-info + +Dockerfile +docker-compose.yml +.dockerignore + +node_modules/ +**/node_modules/ + +api/data \ No newline at end of file diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..322288a --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +.idea +app.env +**/app.env +.DS_Store +.scannerwork diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..c4a3b12 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,92 @@ +# Changelog + +## v0.76 - 2023-05-02 +* Release Community Edition +* Add license checks and enforce license limits +* Project archiving and encryption with 4-eye principle +* Improve list editing in markdown editor +* Add a refresh PDF button to the publish project page + + +## v0.19 - 2023-04-11 +* Add private designs visible only to your user +* Support Postgres with PgBouncer in LanguageTool +* Allow storing files in S3 buckets +* Fix backup restore failing for notifications + + +## v0.18 - 2023-03-13 +* Allow setting emojis as custom note icons +* Require re-authentication to enable admin permissions in user sessions +* Test and improve backup and restore logic +* Automatically cleanup unreferenced files and images +* Add words to spellcheck dictionary +* Allow removing and updating roles of imported project members +* Fix label not shown for number fields + + +## v0.17 - 2023-03-01 +* Use variable Open Sans font to fix footnote-call rendering ("font-variant-position: super" not applied) + + +## v0.16 - 2023-02-23 +* Personal and per-project notes +* Use asgi instead of wsgi to support async requests +* Async PDF rendering and spellcheck request +* Support Elastic APM for API and frontend monitoring +* Fetch and display notifications to users +* Add titles to pages in frontend + + +## v0.15 - 2023-02-06 +* Support login via OpenID Connect +* Support offloading PDF rendering to a pool of worker instances +* Spellchecking and highlighting TODOs in string fields +* Make toolbar sticky on top of finding, section and template editor +* Separate scrollbars for side menu and main content +* Rework PDF Viewer + + +## v0.14 - 2023-01-03 +* Data-at-rest encryption for files and sensitive DB data +* Use Session cookies instead of JWT tokens +* Support two factor authentication with FIDO2, TOTP and Backup Codes +* Add user role and permissions for system users +* Support encrypting backups + + +## v0.13 - 2022-12-16 +* Add logo and favicon +* Add per-project user tags +* UI Improvement: create finding dialog: reset template search input after closing dialog, set search query as finding title for new empty findings +* UI Improvement: allow text selection in Markdown editor preview area + + +## v0.12 - 2022-12-05 +* Provide some standard fonts in the docker container +* Customize designs per project +* Allow force changing designs of projects if the old and new design are incompatible +* Update Chromium to fix CVE-2022-4262 (high) + + +## v0.11 - 2022-11-25 +* Compress images to reduce storage size and PDF size +* Manual highlighting of text in markdown code blocks +* Add review status to sections, 
findings and templates
+* UI improvements: rework texts, add icons, more detailed error messages, group warnings by type in the publish page
+* Fix rendering of lists of users containing imported project users
+
+
+## Initial - 2022-11-16
+* Beginning of changelog
+* Collaboratively write pentesting reports
+* Render reports to PDF
+* Customize report designs to your needs
+* Finding Template library
+* Export and import designs/templates/projects to share data
+* Multi-language support: English and German
+* Spell checking
+* Edit locking
+* Drag-and-drop image upload
+* PDF encryption
+* and many more features
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..5ea1ea7
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,147 @@
+FROM node:16-alpine@sha256:710a2c192ca426e03e4f3ec1869e5c29db855eb6969b74e6c50fd270ffccd3f1 AS pdfviewer-dev
+WORKDIR /app/packages/pdfviewer/
+COPY packages/pdfviewer/package.json packages/pdfviewer/package-lock.json /app/packages/pdfviewer//
+RUN npm install
+
+FROM pdfviewer-dev AS pdfviewer
+COPY packages/pdfviewer /app/packages/pdfviewer//
+RUN npm run build
+
+
+
+
+
+
+
+FROM node:16-alpine@sha256:710a2c192ca426e03e4f3ec1869e5c29db855eb6969b74e6c50fd270ffccd3f1 AS frontend-dev
+
+WORKDIR /app/packages/markdown/
+COPY packages/markdown/package.json packages/markdown/package-lock.json /app/packages/markdown/
+RUN npm install
+
+WORKDIR /app/frontend
+COPY frontend/package.json frontend/package-lock.json /app/frontend/
+RUN npm install
+
+
+FROM frontend-dev AS frontend-test
+COPY packages/markdown/ /app/packages/markdown/
+COPY frontend /app/frontend/
+COPY --from=pdfviewer /app/packages/pdfviewer/dist/ /app/frontend/static/static/pdfviewer/
+CMD npm run test
+
+
+FROM frontend-test AS frontend
+RUN npm run build
+
+
+
+
+
+
+
+FROM node:16-alpine@sha256:710a2c192ca426e03e4f3ec1869e5c29db855eb6969b74e6c50fd270ffccd3f1 AS rendering-dev
+
+WORKDIR /app/packages/markdown/
+COPY packages/markdown/package.json packages/markdown/package-lock.json /app/packages/markdown/
+RUN npm install
+
+WORKDIR /app/rendering/
+COPY rendering/package.json rendering/package-lock.json /app/rendering/
+RUN npm install
+
+
+FROM rendering-dev AS rendering
+COPY rendering /app/rendering/
+COPY packages/markdown/ /app/packages/markdown/
+RUN npm run build
+
+
+
+
+FROM python:3.10-slim-bullseye@sha256:89648909125f37eeff6dee35491e6295c77b76c42aa1aff2523478990e73d3fe AS api-dev
+
+# Install system dependencies required by weasyprint and chromium
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    chromium \
+    curl \
+    fontconfig \
+    fonts-noto \
+    fonts-noto-mono \
+    fonts-noto-ui-core \
+    fonts-noto-color-emoji \
+    gpg \
+    gpg-agent \
+    libpango-1.0-0 \
+    libpangoft2-1.0-0 \
+    unzip \
+    wget \
+    postgresql-client \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install fonts
+WORKDIR /app/api/
+COPY api/download_fonts.sh api/generate_notice.sh api/NOTICE /app/api/
+RUN chmod +x /app/api/download_fonts.sh && /app/api/download_fonts.sh
+
+# Install python packages
+ENV PYTHONUNBUFFERED=on \
+    PYTHONDONTWRITEBYTECODE=on \
+    CHROMIUM_EXECUTABLE=/usr/lib/chromium/chromium
+WORKDIR /app/api/
+COPY api/requirements.txt /app/api/requirements.txt
+RUN pip install -r /app/api/requirements.txt
+
+# Configure application
+ARG VERSION=dev
+ENV VERSION=${VERSION} \
+    DEBUG=off \
+    MEDIA_ROOT=/data/ \
+    SERVER_WORKERS=4 \
+    PDF_RENDER_SCRIPT_PATH=/app/rendering/dist/bundle.js
+
+# Copy license and changelog
+COPY LICENSE CHANGELOG.md /app/
+
+# Start server
+EXPOSE 8000
+CMD python3 
manage.py migrate && \
+    gunicorn \
+        --bind=:8000 --worker-class=uvicorn.workers.UvicornWorker --workers=${SERVER_WORKERS} \
+        --max-requests=500 --max-requests-jitter=100 \
+        reportcreator_api.conf.asgi:application
+
+
+
+FROM api-dev as api-prebuilt
+
+# Copy source code (including pre-built static files)
+COPY api/src /app/api
+COPY rendering/dist /app/rendering/dist/
+
+# Create data directory
+RUN mkdir /data && chown 1000:1000 /data && chmod 777 /data
+VOLUME [ "/data" ]
+USER 1000
+
+
+
+FROM api-dev AS api-test
+# Copy source code
+COPY api/src /app/api
+
+# Copy generated template rendering script
+COPY --from=rendering /app/rendering/dist /app/rendering/dist/
+CMD pytest
+
+
+FROM api-test as api
+# Generate static frontend files
+# Collect django static files (for admin, API browser) and post-process them (e.g. add unique file hash)
+# Do not post-process nuxt files, because they already have hash names (and django fails to post-process them)
+USER root
+RUN python3 manage.py collectstatic --no-input --clear
+COPY --from=frontend /app/frontend/dist/ /app/api/frontend/
+RUN python3 manage.py collectstatic --no-input --no-post-process \
+    && python3 -m whitenoise.compress /app/api/static/
+USER 1000
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..4ffd4bf
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,117 @@
+# SysReptor Community License 1.0 (SysReptorL)
+## Acceptance
+
+In order to get any Permissions to Use the Software under the
+SysReptorL, you must agree to it as both strict obligations
+and conditions to all your Licenses.
+
+## Copyright License
+
+The licensor grants you a non-exclusive copyright Permission
+to Use the Software for everything you might do with the Software
+that would otherwise infringe the licensor's copyright in it for
+any permitted purpose, other than distributing the software or
+making changes or new works based on the Software. Attempts to
+circumvent technical License restrictions are prohibited (e.g.
+to unlock or extend functionalities), even if they result from
+errors in the Software.
+
+## Patent License
+
+The licensor grants you a non-exclusive patent License for the
+Software that covers patent claims the licensor can license, or
+becomes able to license, that you would infringe by using the
+Software after its Intended Use.
+
+## Internal Business Use
+
+Use of the Software for the internal business operations of
+you and your Company is use for a permitted purpose.
+
+## Personal Uses
+
+Personal use for research, experiment, and testing for the
+benefit of public knowledge, personal study, private entertainment,
+hobby projects, amateur pursuits, or religious observance,
+without any anticipated commercial application, is use for a
+permitted purpose.
+
+## Fair Use
+
+You may have "**Fair Use**" rights for the Software under the law.
+The SysReptorL does not limit them unless otherwise agreed.
+
+Pursuant to Section 40d of the Act on Copyright and Related
+Rights (Urheberrechtsgesetz, UrhG), computer programs may be
+edited and reproduced within the framework of the Fair Use of
+works to the extent that this is necessary for the Intended
+Use of the Software by the person entitled to use it. The
+**Intended Use** is limited to the permitted purpose of the Software
+in accordance with the SysReptorL.
+
+## No Other Rights
+
+The SysReptorL does not allow you to sublicense or transfer
+any of your Licenses to anyone else or prevent the licensor
+from granting Licenses to anyone else.
The SysReptorL does not +imply any other Licenses than those mentioned therein. + +## Patent Defense + +If you make any written claim that the Software infringes or +contributes to infringement of any patent, your patent License +for the Software granted under this SysReptorL ends immediately. If +your Company makes such a claim, your patent License ends +immediately for work on behalf of your Company. Irrespective of the +withdrawal of Permission to Use the Software, we reserve the right +to assert claims for damages. + +## Violations + +The first time you are notified in writing that you have +violated any of these terms, or done anything with the software +not covered by your licenses, your licenses can nonetheless +continue if you come into full compliance with these terms, +and take practical steps to correct past violations, within +32 days of receiving notice. Otherwise, all your licenses +end immediately. + +## No Liability + +***As far as the law allows, the Software comes “as is”, without +any warranty or condition, and the licensor will not be liable +to you for any damages arising out of this SysReptorL or the use +or nature of the Software, under any kind of legal claim.*** + +## Definitions + +The SysReptor Community License 1.0 (**SysReptorL**) is granted by +Syslifters GmbH, FN 578505v, registered office Göllersdorf +(**Syslifters**; **we**; **licensor**) to **you**. + +**License**: Is the overall term for the authorization to use the +Software. The term "License" says nothing about the copyright +classification. + +**Software**: is the software the licensor makes available under +these terms. + +**Permission to Use the Software** (*Werknutzungsbewilligung*): +Non-exclusive copyright Permission to Use the Software. **Use** +means anything you do with the software requiring one of your +licenses. + +**Your Company**: Is any legal entity, sole proprietorship, or +other kind of organization that you work for, plus all organizations +that have control over, are under the control of, or are under common +control with that organization. **Control** means ownership of +substantially all the assets of an entity, or the power to direct its +management and policies by vote, contract, or otherwise. Control can +be direct or indirect. + +**Your licenses** are all the licenses granted to you for the +software under these terms. + + +------------ +**Last Updated:** 24 March 2023 \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..c7f8e44 --- /dev/null +++ b/README.md @@ -0,0 +1,53 @@ + + + + + + + + + + + + + + + + + +
+
+# SysReptor - Pentest Reporting Easy As Pie
+
+📝 Write in markdown
+✏️ Design in HTML/VueJS
+⚙️ Render your report to PDF
+🚀 Fully customizable
+💻 Self-hosted or Cloud
+🎉 No need for Word
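
Before diving into the Installation section below, a quick sanity check that the host meets the listed prerequisites (Ubuntu, latest Docker with Docker Compose v2) might look like this; a minimal sketch, not part of the original README:

```bash
# Verify prerequisites for the install script (see Installation below)
lsb_release -ds           # should report an Ubuntu release
docker --version          # Docker Engine must be installed
docker compose version    # Compose v2 exposes the `docker compose` subcommand
```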
+ +Happy Hacking! :) + + +* Playground: [Try it out!](https://cloud.sysreptor.com/demo) +* Docs: https://docs.sysreptor.com/ +* Features: https://docs.sysreptor.com/features-and-pricing/ + + +## Installation +You will need: +* Ubuntu +* Latest Docker with Docker Compose v2 + +```bash +curl -s https://docs.sysreptor.com/install.sh | bash +``` + +Access your application at http://localhost:8000/. + + +![Create finding from template](https://docs.sysreptor.com/images/create_finding_from_template.gif) + +![Export report as PDF](https://docs.sysreptor.com/images/export_project.gif) + + diff --git a/api/.gitignore b/api/.gitignore new file mode 100644 index 0000000..0c99090 --- /dev/null +++ b/api/.gitignore @@ -0,0 +1,147 @@ +# Django # +*.log +*.pot +*.pyc +__pycache__ +db.sqlite3 +media + +# Backup files # +*.bak + +# If you are using PyCharm # +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# File-based project format +*.iws + +# IntelliJ +out/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Python # +*.py[cod] +*$py.class + +# Distribution / packaging +.Python build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +.pytest_cache/ +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +**/junit.xml +test-reports + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery +celerybeat-schedule.* + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# Sublime Text # +*.tmlanguage.cache +*.tmPreferences.cache +*.stTheme.cache +*.sublime-workspace +*.sublime-project + +# sftp configuration file +sftp-config.json + +# Package control specific files Package +Control.last-run +Control.ca-list +Control.ca-bundle +Control.system-ca-bundle +GitHub.sublime-settings + +# Visual Studio Code # +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +.history + +data +src/static +src/frontend/* +src/frontend/*/static/* +!src/frontend/index.html +!src/frontend/static +!src/frontend/static/.gitkeep diff --git a/api/NOTICE b/api/NOTICE new file mode 100644 index 0000000..1ea19cc --- /dev/null +++ b/api/NOTICE @@ -0,0 +1,7362 @@ +Authlib +1.2.0 +BSD License +BSD 3-Clause License + +Copyright (c) 2017, Hsiaoming Yang +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Brotli +1.0.9 +MIT License +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +Django +4.2b1 +BSD License +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Pillow +9.4.0 +Historical Permission Notice and Disclaimer (HPND) +The Python Imaging Library (PIL) is + + Copyright © 1997-2011 by Secret Labs AB + Copyright © 1995-2011 by Fredrik Lundh + +Pillow is the friendly PIL fork. It is + + Copyright © 2010-2023 by Alex Clark and contributors + +Like PIL, Pillow is licensed under the open source HPND License: + +By obtaining, using, and/or copying this software and/or its associated +documentation, you agree that you have read, understood, and will comply +with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its +associated documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appears in all copies, and that +both that copyright notice and this permission notice appear in supporting +documentation, and that the name of Secret Labs AB or the author not be +used in advertising or publicity pertaining to distribution of the software +without specific, written prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, +INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + +---- + +BROTLI + +Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +---- + +BZIP2 + + +-------------------------------------------------------------------------- + +This program, "bzip2", the associated library "libbzip2", and all +documentation, are copyright (C) 1996-2019 Julian R Seward. All +rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. The origin of this software must not be misrepresented; you must + not claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product + documentation would be appreciated but is not required. + +3. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + +4. The name of the author may not be used to endorse or promote + products derived from this software without specific prior written + permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Julian Seward, jseward@acm.org +bzip2/libbzip2 version 1.0.8 of 13 July 2019 + +-------------------------------------------------------------------------- + + +---- + +FREETYPE2 + +The FreeType 2 font engine is copyrighted work and cannot be used +legally without a software license. In order to make this project +usable to a vast majority of developers, we distribute it under two +mutually exclusive open-source licenses. + +This means that *you* must choose *one* of the two licenses described +below, then obey all its terms and conditions when using FreeType 2 in +any of your projects or products. + + - The FreeType License, found in the file `FTL.TXT', which is similar + to the original BSD license *with* an advertising clause that forces + you to explicitly cite the FreeType project in your product's + documentation. All details are in the license file. This license + is suited to products which don't use the GNU General Public + License. + + Note that this license is compatible to the GNU General Public + License version 3, but not version 2. + + - The GNU General Public License version 2, found in `GPLv2.TXT' (any + later version can be used also), for programs which already use the + GPL. Note that the FTL is incompatible with GPLv2 due to its + advertisement clause. + +The contributed BDF and PCF drivers come with a license similar to that +of the X Window System. It is compatible to the above two licenses (see +file src/bdf/README and src/pcf/README). The same holds for the files +`fthash.c' and `fthash.h'; their code was part of the BDF driver in +earlier FreeType versions. + +The gzip module uses the zlib license (see src/gzip/zlib.h) which too is +compatible to the above two licenses. + +The MD5 checksum support (only used for debugging in development builds) +is in the public domain. + + +---- + +HARFBUZZ + +HarfBuzz is licensed under the so-called "Old MIT" license. Details follow. 
+For parts of HarfBuzz that are licensed under different licenses see individual +files names COPYING in subdirectories where applicable. + +Copyright © 2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020 Google, Inc. +Copyright © 2018,2019,2020 Ebrahim Byagowi +Copyright © 2019,2020 Facebook, Inc. +Copyright © 2012 Mozilla Foundation +Copyright © 2011 Codethink Limited +Copyright © 2008,2010 Nokia Corporation and/or its subsidiary(-ies) +Copyright © 2009 Keith Stribley +Copyright © 2009 Martin Hosken and SIL International +Copyright © 2007 Chris Wilson +Copyright © 2006 Behdad Esfahbod +Copyright © 2005 David Turner +Copyright © 2004,2007,2008,2009,2010 Red Hat, Inc. +Copyright © 1998-2004 David Turner and Werner Lemberg + +For full copyright notices consult the individual files in the package. + + +Permission is hereby granted, without written agreement and without +license or royalty fees, to use, copy, modify, and distribute this +software and its documentation for any purpose, provided that the +above copyright notice and the following two paragraphs appear in +all copies of this software. + +IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR +DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES +ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN +IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGE. + +THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, +BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS +ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO +PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. + + +---- + +LCMS2 + +Little CMS +Copyright (c) 1998-2020 Marti Maria Saguer + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +---- + +LIBJPEG + +1. We don't promise that this software works. (But if you find any bugs, + please let us know!) +2. You can use this software for whatever you want. You don't have to pay us. +3. You may not pretend that you wrote this software. If you use it in a + program, you must acknowledge somewhere in your documentation that + you've used the IJG code. + +In legalese: + +The authors make NO WARRANTY or representation, either express or implied, +with respect to this software, its quality, accuracy, merchantability, or +fitness for a particular purpose. This software is provided "AS IS", and you, +its user, assume the entire risk as to its quality and accuracy. 
+ +This software is copyright (C) 1991-2020, Thomas G. Lane, Guido Vollbeding. +All Rights Reserved except as specified below. + +Permission is hereby granted to use, copy, modify, and distribute this +software (or portions thereof) for any purpose, without fee, subject to these +conditions: +(1) If any part of the source code for this software is distributed, then this +README file must be included, with this copyright and no-warranty notice +unaltered; and any additions, deletions, or changes to the original files +must be clearly indicated in accompanying documentation. +(2) If only executable code is distributed, then the accompanying +documentation must state that "this software is based in part on the work of +the Independent JPEG Group". +(3) Permission for use of this software is granted only if the user accepts +full responsibility for any undesirable consequences; the authors accept +NO LIABILITY for damages of any kind. + +These conditions apply to any software derived from or based on the IJG code, +not just to the unmodified library. If you use our work, you ought to +acknowledge us. + +Permission is NOT granted for the use of any IJG author's name or company name +in advertising or publicity relating to this software or products derived from +it. This software may be referred to only as "the Independent JPEG Group's +software". + +We specifically permit and encourage the use of this software as the basis of +commercial products, provided that all warranty or liability claims are +assumed by the product vendor. + + +---- + +LIBLZMA + +XZ Utils Licensing +================== + + Different licenses apply to different files in this package. Here + is a rough summary of which licenses apply to which parts of this + package (but check the individual files to be sure!): + + - liblzma is in the public domain. + + - xz, xzdec, and lzmadec command line tools are in the public + domain unless GNU getopt_long had to be compiled and linked + in from the lib directory. The getopt_long code is under + GNU LGPLv2.1+. + + - The scripts to grep, diff, and view compressed files have been + adapted from gzip. These scripts and their documentation are + under GNU GPLv2+. + + - All the documentation in the doc directory and most of the + XZ Utils specific documentation files in other directories + are in the public domain. + + - Translated messages are in the public domain. + + - The build system contains public domain files, and files that + are under GNU GPLv2+ or GNU GPLv3+. None of these files end up + in the binaries being built. + + - Test files and test code in the tests directory, and debugging + utilities in the debug directory are in the public domain. + + - The extra directory may contain public domain files, and files + that are under various free software licenses. + + You can do whatever you want with the files that have been put into + the public domain. If you find public domain legally problematic, + take the previous sentence as a license grant. If you still find + the lack of copyright legally problematic, you have too many + lawyers. + + As usual, this software is provided "as is", without any warranty. + + If you copy significant amounts of public domain code from XZ Utils + into your project, acknowledging this somewhere in your software is + polite (especially if it is proprietary, non-free software), but + naturally it is not legally required. Here is an example of a good + notice to put into "about box" or into documentation: + + This software includes code from XZ Utils . 
+ + The following license texts are included in the following files: + - COPYING.LGPLv2.1: GNU Lesser General Public License version 2.1 + - COPYING.GPLv2: GNU General Public License version 2 + - COPYING.GPLv3: GNU General Public License version 3 + + Note that the toolchain (compiler, linker etc.) may add some code + pieces that are copyrighted. Thus, it is possible that e.g. liblzma + binary wouldn't actually be in the public domain in its entirety + even though it contains no copyrighted code from the XZ Utils source + package. + + If you have questions, don't hesitate to ask the author(s) for more + information. + + +---- + +LIBPNG + +COPYRIGHT NOTICE, DISCLAIMER, and LICENSE +========================================= + +PNG Reference Library License version 2 +--------------------------------------- + + * Copyright (c) 1995-2019 The PNG Reference Library Authors. + * Copyright (c) 2018-2019 Cosmin Truta. + * Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson. + * Copyright (c) 1996-1997 Andreas Dilger. + * Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +The software is supplied "as is", without warranty of any kind, +express or implied, including, without limitation, the warranties +of merchantability, fitness for a particular purpose, title, and +non-infringement. In no event shall the Copyright owners, or +anyone distributing the software, be liable for any damages or +other liability, whether in contract, tort or otherwise, arising +from, out of, or in connection with the software, or the use or +other dealings in the software, even if advised of the possibility +of such damage. + +Permission is hereby granted to use, copy, modify, and distribute +this software, or portions hereof, for any purpose, without fee, +subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you + must not claim that you wrote the original software. If you + use this software in a product, an acknowledgment in the product + documentation would be appreciated, but is not required. + + 2. Altered source versions must be plainly marked as such, and must + not be misrepresented as being the original software. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + + +PNG Reference Library License version 1 (for libpng 0.5 through 1.6.35) +----------------------------------------------------------------------- + +libpng versions 1.0.7, July 1, 2000, through 1.6.35, July 15, 2018 are +Copyright (c) 2000-2002, 2004, 2006-2018 Glenn Randers-Pehrson, are +derived from libpng-1.0.6, and are distributed according to the same +disclaimer and license as libpng-1.0.6 with the following individuals +added to the list of Contributing Authors: + + Simon-Pierre Cadieux + Eric S. Raymond + Mans Rullgard + Cosmin Truta + Gilles Vollant + James Yu + Mandar Sahastrabuddhe + Google Inc. + Vadim Barkov + +and with the following additions to the disclaimer: + + There is no warranty against interference with your enjoyment of + the library or against infringement. There is no warranty that our + efforts or the library will fulfill any of your particular purposes + or needs. This library is provided with all faults, and the entire + risk of satisfactory quality, performance, accuracy, and effort is + with the user. + +Some files in the "contrib" directory and some configure-generated +files that are distributed with libpng have other copyright owners, and +are released under other open source licenses. 
+ +libpng versions 0.97, January 1998, through 1.0.6, March 20, 2000, are +Copyright (c) 1998-2000 Glenn Randers-Pehrson, are derived from +libpng-0.96, and are distributed according to the same disclaimer and +license as libpng-0.96, with the following individuals added to the +list of Contributing Authors: + + Tom Lane + Glenn Randers-Pehrson + Willem van Schaik + +libpng versions 0.89, June 1996, through 0.96, May 1997, are +Copyright (c) 1996-1997 Andreas Dilger, are derived from libpng-0.88, +and are distributed according to the same disclaimer and license as +libpng-0.88, with the following individuals added to the list of +Contributing Authors: + + John Bowler + Kevin Bracey + Sam Bushell + Magnus Holmgren + Greg Roelofs + Tom Tanner + +Some files in the "scripts" directory have other copyright owners, +but are released under this license. + +libpng versions 0.5, May 1995, through 0.88, January 1996, are +Copyright (c) 1995-1996 Guy Eric Schalnat, Group 42, Inc. + +For the purposes of this copyright and license, "Contributing Authors" +is defined as the following set of individuals: + + Andreas Dilger + Dave Martindale + Guy Eric Schalnat + Paul Schmidt + Tim Wegner + +The PNG Reference Library is supplied "AS IS". The Contributing +Authors and Group 42, Inc. disclaim all warranties, expressed or +implied, including, without limitation, the warranties of +merchantability and of fitness for any purpose. The Contributing +Authors and Group 42, Inc. assume no liability for direct, indirect, +incidental, special, exemplary, or consequential damages, which may +result from the use of the PNG Reference Library, even if advised of +the possibility of such damage. + +Permission is hereby granted to use, copy, modify, and distribute this +source code, or portions hereof, for any purpose, without fee, subject +to the following restrictions: + + 1. The origin of this source code must not be misrepresented. + + 2. Altered versions must be plainly marked as such and must not + be misrepresented as being the original source. + + 3. This Copyright notice may not be removed or altered from any + source or altered source distribution. + +The Contributing Authors and Group 42, Inc. specifically permit, +without fee, and encourage the use of this source code as a component +to supporting the PNG file format in commercial products. If you use +this source code in a product, acknowledgment is not required but would +be appreciated. + + +---- + +LIBTIFF + +Copyright (c) 1988-1997 Sam Leffler +Copyright (c) 1991-1997 Silicon Graphics, Inc. + +Permission to use, copy, modify, distribute, and sell this software and +its documentation for any purpose is hereby granted without fee, provided +that (i) the above copyright notices and this permission notice appear in +all copies of the software and related documentation, and (ii) the names of +Sam Leffler and Silicon Graphics may not be used in any advertising or +publicity relating to the software without the specific, prior written +permission of Sam Leffler and Silicon Graphics. + +THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, +EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY +WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. 
+ +IN NO EVENT SHALL SAM LEFFLER OR SILICON GRAPHICS BE LIABLE FOR +ANY SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER OR NOT ADVISED OF THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF +LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +OF THIS SOFTWARE. + + +---- + +LIBWEBP + +Copyright (c) 2010, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + * Neither the name of Google nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +---- + +OPENJPEG + +* + * The copyright in this software is being made available under the 2-clauses + * BSD License, included below. This software may be subject to other third + * party and contributor rights, including patent rights, and no such rights + * are granted under this license. + * + * Copyright (c) 2002-2014, Universite catholique de Louvain (UCL), Belgium + * Copyright (c) 2002-2014, Professor Benoit Macq + * Copyright (c) 2003-2014, Antonin Descampe + * Copyright (c) 2003-2009, Francois-Olivier Devaux + * Copyright (c) 2005, Herve Drolon, FreeImage Team + * Copyright (c) 2002-2003, Yannick Verschueren + * Copyright (c) 2001-2003, David Janssens + * Copyright (c) 2011-2012, Centre National d'Etudes Spatiales (CNES), France + * Copyright (c) 2012, CS Systemes d'Information, France + * + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS' + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ + + +---- + +RAQM + +The MIT License (MIT) + +Copyright © 2015 Information Technology Authority (ITA) +Copyright © 2016 Khaled Hosny + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +---- + +XAU + +Copyright 1988, 1993, 1994, 1998 The Open Group + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of The Open Group shall not be +used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization from The Open Group. + + +---- + +XCB + +Copyright (C) 2001-2006 Bart Massey, Jamey Sharp, and Josh Triplett. +All Rights Reserved. 
+ +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, +sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall +be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY +KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR +PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the names of the authors +or their institutions shall not be used in advertising or +otherwise to promote the sale, use or other dealings in this +Software without prior written authorization from the +authors. + + +---- + +XDMCP + +Copyright 1989, 1998 The Open Group + +Permission to use, copy, modify, distribute, and sell this software and its +documentation for any purpose is hereby granted without fee, provided that +the above copyright notice appear in all copies and that both that +copyright notice and this permission notice appear in supporting +documentation. + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN +AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +Except as contained in this notice, the name of The Open Group shall not be +used in advertising or otherwise to promote the sale, use or other dealings +in this Software without prior written authorization from The Open Group. + +Author: Keith Packard, MIT X Consortium + + +---- + +ZLIB + + (C) 1995-2017 Jean-loup Gailly and Mark Adler + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. 
+ + Jean-loup Gailly Mark Adler + jloup@gzip.org madler@alumni.caltech.edu + +If you use the zlib library in a product, we would appreciate *not* receiving +lengthy legal documents to sign. The sources are provided for free but without +warranty of any kind. The library has been entirely written by Jean-loup +Gailly and Mark Adler; it does not include third-party code. + +If you redistribute modified sources, we would appreciate that you include in +the file ChangeLog history information documenting your changes. Please read +the FAQ for more information on the distribution of modified source versions. + + +PyJWT +2.6.0 +MIT License +The MIT License (MIT) + +Copyright (c) 2015-2022 José Padilla + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +adrf +0.1.0 +MIT License +Copyright (c) 2022 Enrico Massa + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +Copyright © 2011-present, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +amqp +5.1.1 +BSD License +Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. +Copyright (c) 2012-2014 GoPivotal, Inc. All rights reserved. +Copyright (c) 2009, 2010, 2011, 2012 Ask Solem, and individual contributors. All rights reserved. +Copyright (C) 2007-2008 Barry Pederson . All rights reserved. + +py-amqp is licensed under The BSD License (3 Clause, also known as +the new BSD license). The license is an OSI approved Open Source +license and is GPL-compatible(1). + +The license text can also be found here: +http://www.opensource.org/licenses/BSD-3-Clause + +License +======= + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Ask Solem, nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ask Solem OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +Footnotes +========= +(1) A GPL-compatible license makes it possible to + combine Celery with other software that is released + under the GPL, it does not mean that we're distributing + Celery under the GPL license. The BSD license, unlike the GPL, + let you distribute a modified version without making your + changes open source. 
+ + +anyio +3.6.2 +MIT License +The MIT License (MIT) + +Copyright (c) 2018 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +asgiref +3.6.0 +BSD License +Copyright (c) Django Software Foundation and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +attrs +22.2.0 +MIT License +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +billiard +3.6.4.0 +BSD License +Copyright (c) 2006-2008, R Oudkerk and Contributors + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of author nor the names of any contributors may be + used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + + + +boto3 +1.26.5 +Apache Software License + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +botocore +1.29.94 +Apache Software License + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +celery +5.2.7 +BSD License +Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. +Copyright (c) 2012-2014 GoPivotal, Inc. All rights reserved. +Copyright (c) 2009, 2010, 2011, 2012 Ask Solem, and individual contributors. All rights reserved. + +Celery is licensed under The BSD License (3 Clause, also known as +the new BSD license). The license is an OSI approved Open Source +license and is GPL-compatible(1). + +The license text can also be found here: +http://www.opensource.org/licenses/BSD-3-Clause + +License +======= + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Ask Solem, nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ask Solem OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ +Documentation License +===================== + +The documentation portion of Celery (the rendered contents of the +"docs" directory of a software distribution or checkout) is supplied +under the "Creative Commons Attribution-ShareAlike 4.0 +International" (CC BY-SA 4.0) License as described by +https://creativecommons.org/licenses/by-sa/4.0/ + +Footnotes +========= +(1) A GPL-compatible license makes it possible to + combine Celery with other software that is released + under the GPL, it does not mean that we're distributing + Celery under the GPL license. The BSD license, unlike the GPL, + let you distribute a modified version without making your + changes open source. + + +certifi +2022.12.7 +Mozilla Public License 2.0 (MPL 2.0) +This package contains a modified version of ca-bundle.crt: + +ca-bundle.crt -- Bundle of CA Root Certificates + +Certificate data from Mozilla as of: Thu Nov 3 19:04:19 2011# +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ + + +cffi +1.15.1 +MIT License + +Except when otherwise stated (look for LICENSE files in directories or +information at the beginning of each file) all software and +documentation is licensed as follows: + + The MIT License + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included + in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + + + +charset-normalizer +3.1.0 +MIT License +MIT License + +Copyright (c) 2019 TAHRI Ahmed R. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +click +8.1.3 +BSD License +Copyright 2014 Pallets + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A +PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +click-didyoumean +0.3.0 +MIT License +Copyright (c) 2016 Timo Furrer + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +click-plugins +1.1.1 +BSD License +New BSD License + +Copyright (c) 2015-2019, Kevin D. Wurster, Sean C. Gillies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither click-plugins nor the names of its contributors may not be used to + endorse or promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +click-repl +0.2.0 +MIT +Copyright (c) 2014-2015 Markus Unterwaditzer & contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +coverage +7.2.2 +Apache Software License + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +cryptography +39.0.2 +Apache Software License; BSD License +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. + +The code used in the OS random engine is derived from CPython, and is licensed +under the terms of the PSF License Agreement. + + +cssselect2 +0.7.0 +BSD License +BSD 3-Clause License + +Copyright (c) 2012-2018, Simon Sapin and contributors (see AUTHORS). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +debugpy +1.6.3 +Eclipse Public License 2.0 (EPL-2.0); MIT License + debugpy + + Copyright (c) Microsoft Corporation + All rights reserved. + + MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS + FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR + COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER + IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +deprecation +2.1.0 +Apache Software License + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +django-csp +3.7 +BSD License +Copyright (c) 2013, Mozilla Foundation +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of django-csp nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +django-debug-toolbar +3.5.0 +BSD License +Copyright (c) Rob Hudson and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of Django nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +django-filter +22.1 +BSD License +Copyright (c) Alex Gaynor and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The names of its contributors may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +django-phonenumber-field +7.0.0 +MIT License +Copyright (c) 2011 Stefan Foulis and contributors. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +django-storages +1.13.2 +BSD License +BSD 3-Clause License + +Copyright (c) 2008 - 2019, See AUTHORS file. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +djangorestframework +3.14.0 +BSD License +# License + +Copyright © 2011-present, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +djangorestframework-simplejwt +5.2.2 +MIT License +Copyright 2017 David Sanders + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +drf-nested-routers +0.93.4 +Apache Software License +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. 
+ +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. 
If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. + +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. + +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright 2003 Alan Justino da Silva + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +elastic-apm +6.14.0 +BSD License +BSD 3-Clause License + +Copyright (c) 2009-2012, David Cramer and individual contributors +Copyright (c) 2013-2018, Elasticsearch BV +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +exceptiongroup +1.1.1 +MIT License +The MIT License (MIT) + +Copyright (c) 2022 Alex Grönholm + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +This project contains code copied from the Python standard library. +The following is the required license notice for those parts. + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. 
+ +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +execnet +1.9.0 +MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + +fido2 +1.1.0 +Apache Software License; BSD License; Mozilla Public License 2.0 (MPL 2.0) +Copyright (c) 2018 Yubico AB +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +fonttools +4.39.2 +MIT License +MIT License + +Copyright (c) 2017 Just van Rossum + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +greenlet +2.0.1 +MIT License +The following files are derived from Stackless Python and are subject to the +same license as Stackless Python: + + src/greenlet/slp_platformselect.h + files in src/greenlet/platform/ directory + +See LICENSE.PSF and http://www.stackless.com/ for details. + +Unless otherwise noted, the files in greenlet have been released under the +following MIT license: + +Copyright (c) Armin Rigo, Christian Tismer and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +gunicorn +20.1.0 +MIT License +2009-2018 (c) Benoît Chesneau +2009-2015 (c) Paul J. 
Davis + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + + +h11 +0.14.0 +MIT License +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +html5lib +1.1 +MIT License +Copyright (c) 2006-2013 James Graham and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +httpcore +0.16.3 +BSD License +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +httpx +0.23.3 +BSD License +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +idna +3.4 +BSD License +BSD 3-Clause License + +Copyright (c) 2013-2021, Kim Davies +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +iniconfig +2.0.0 +MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + + +jmespath +1.0.1 +MIT License +Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, dis- +tribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the fol- +lowing conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- +ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
+ + +jsonschema +4.17.0 +MIT License +Copyright (c) 2013 Julian Berman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +kombu +5.2.4 +BSD License +Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. +Copyright (c) 2012-2014 GoPivotal Inc & contributors. All rights reserved. +Copyright (c) 2009-2012, Ask Solem & contributors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Ask Solem nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ask Solem OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +librabbitmq +2.0.0 +Mozilla Public License 1.0 (MPL) + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. 
This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. 
You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. + +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. + + +lorem-text +2.1 +MIT License +MIT License + +Copyright (c) 2020, Abhijeet Pal + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +lxml +4.9.2 +BSD License +Copyright (c) 2004 Infrae. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + + 3. Neither the name of Infrae nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL INFRAE OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +packaging +23.0 +Apache Software License; BSD License +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. 
+ + +phonenumberslite +8.13.7 +Apache Software License + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + +pikepdf +6.0.2 +Mozilla Public License 2.0 (MPL 2.0) +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. 
"Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. 
Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. 
Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. 
+ +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. + + +pillow-heif +0.7.2 +GNU General Public License v2 (GPLv2) + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +playwright +1.28.0 +Apache Software License + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Portions Copyright (c) Microsoft Corporation. + Portions Copyright 2017 Google Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + +pluggy +1.0.0 +MIT License +The MIT License (MIT) + +Copyright (c) 2015 holger krekel (rather uses bitbucket/hpk42) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +prompt-toolkit +3.0.38 +BSD License +Copyright (c) 2014, Jonathan Slenders +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +* Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +psycopg2-binary +2.9.5 +GNU Library or Lesser General Public License (LGPL) +psycopg2 and the LGPL +--------------------- + +psycopg2 is free software: you can redistribute it and/or modify it +under the terms of the GNU Lesser General Public License as published +by the Free Software Foundation, either version 3 of the License, or +(at your option) any later version. + +psycopg2 is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public +License for more details. 
+ +In addition, as a special exception, the copyright holders give +permission to link this program with the OpenSSL library (or with +modified versions of OpenSSL that use the same license as OpenSSL), +and distribute linked combinations including the two. + +You must obey the GNU Lesser General Public License in all respects for +all of the code used other than OpenSSL. If you modify file(s) with this +exception, you may extend this exception to your version of the file(s), +but you are not obligated to do so. If you do not wish to do so, delete +this exception statement from your version. If you delete this exception +statement from all source files in the program, then also delete it here. + +You should have received a copy of the GNU Lesser General Public License +along with psycopg2 (see the doc/ directory.) +If not, see . + + +Alternative licenses +-------------------- + +The following BSD-like license applies (at your option) to the files following +the pattern ``psycopg/adapter*.{h,c}`` and ``psycopg/microprotocol*.{h,c}``: + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this + software in a product, an acknowledgment in the product documentation + would be appreciated but is not required. + + 2. Altered source versions must be plainly marked as such, and must not + be misrepresented as being the original software. + + 3. This notice may not be removed or altered from any source distribution. + + +pycparser +2.21 +BSD License +pycparser -- A C parser in Python + +Copyright (c) 2008-2020, Eli Bendersky +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +* Neither the name of Eli Bendersky nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +pycryptodomex +3.15.0 +Apache Software License; BSD License; Public Domain +The source code in PyCryptodome is partially in the public domain +and partially released under the BSD 2-Clause license. 
+ +In either case, there are minimal if no restrictions on the redistribution, +modification and usage of the software. + +Public domain +============= + +All code originating from PyCrypto is free and unencumbered software +released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or +distribute this software, either in source code form or as a compiled +binary, for any purpose, commercial or non-commercial, and by any +means. + +In jurisdictions that recognize copyright laws, the author or authors +of this software dedicate any and all copyright interest in the +software to the public domain. We make this dedication for the benefit +of the public at large and to the detriment of our heirs and +successors. We intend this dedication to be an overt act of +relinquishment in perpetuity of all present and future rights to this +software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR +OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, +ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to + +BSD license +=========== + +All direct contributions to PyCryptodome are released under the following +license. The copyright of each piece belongs to the respective author. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +pydyf +0.5.0 +BSD License +BSD 3-Clause License + +Copyright (c) 2020, CourtBouillon +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. 
Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +pyee +9.0.4 +MIT License +The MIT License (MIT) + +Copyright (c) 2021 Josh Holbrook + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +pyotp +2.8.0 +MIT License +Copyright (C) 2011-2021 Mark Percival , +Nathan Reynolds , Andrey Kislyuk , +and PyOTP contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +pyphen +0.14.0 +GNU General Public License v2 or later (GPLv2+); GNU Lesser General Public License v2 or later (LGPLv2+); Mozilla Public License 1.1 (MPL 1.1) + + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc. 
+ 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your + freedom to share and change it. By contrast, the GNU General Public + License is intended to guarantee your freedom to share and change free + software--to make sure the software is free for all its users. This + General Public License applies to most of the Free Software + Foundation's software and to any other program whose authors commit to + using it. (Some other Free Software Foundation software is covered by + the GNU Library General Public License instead.) You can apply it to + your programs, too. + + When we speak of free software, we are referring to freedom, not + price. Our General Public Licenses are designed to make sure that you + have the freedom to distribute copies of free software (and charge for + this service if you wish), that you receive source code or can get it + if you want it, that you can change the software or use pieces of it + in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid + anyone to deny you these rights or to ask you to surrender the rights. + These restrictions translate to certain responsibilities for you if you + distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether + gratis or for a fee, you must give the recipients all the rights that + you have. You must make sure that they, too, receive or can get the + source code. And you must show them these terms so they know their + rights. + + We protect your rights with two steps: (1) copyright the software, and + (2) offer you this license which gives you legal permission to copy, + distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain + that everyone understands that there is no warranty for this free + software. If the software is modified by someone else and passed on, we + want its recipients to know that what they have is not the original, so + that any problems introduced by others will not reflect on the original + authors' reputations. + + Finally, any free program is threatened constantly by software + patents. We wish to avoid the danger that redistributors of a free + program will individually obtain patent licenses, in effect making the + program proprietary. To prevent this, we have made it clear that any + patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and + modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains + a notice placed by the copyright holder saying it may be distributed + under the terms of this General Public License. The "Program", below, + refers to any such program or work, and a "work based on the Program" + means either the Program or any derivative work under copyright law: + that is to say, a work containing the Program or a portion of it, + either verbatim or with modifications and/or translated into another + language. (Hereinafter, translation is included without limitation in + the term "modification".) Each licensee is addressed as "you". 
+ + Activities other than copying, distribution and modification are not + covered by this License; they are outside its scope. The act of + running the Program is not restricted, and the output from the Program + is covered only if its contents constitute a work based on the + Program (independent of having been made by running the Program). + Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's + source code as you receive it, in any medium, provided that you + conspicuously and appropriately publish on each copy an appropriate + copyright notice and disclaimer of warranty; keep intact all the + notices that refer to this License and to the absence of any warranty; + and give any other recipients of the Program a copy of this License + along with the Program. + + You may charge a fee for the physical act of transferring a copy, and + you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion + of it, thus forming a work based on the Program, and copy and + distribute such modifications or work under the terms of Section 1 + above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. + + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + + These requirements apply to the modified work as a whole. If + identifiable sections of that work are not derived from the Program, + and can be reasonably considered independent and separate works in + themselves, then this License, and its terms, do not apply to those + sections when you distribute them as separate works. But when you + distribute the same sections as part of a whole which is a work based + on the Program, the distribution of the whole must be on the terms of + this License, whose permissions for other licensees extend to the + entire whole, and thus to each and every part regardless of who wrote it. + + Thus, it is not the intent of this section to claim rights or contest + your rights to work written entirely by you; rather, the intent is to + exercise the right to control the distribution of derivative or + collective works based on the Program. + + In addition, mere aggregation of another work not based on the Program + with the Program (or with a work based on the Program) on a volume of + a storage or distribution medium does not bring the other work under + the scope of this License. + + 3. 
You may copy and distribute the Program (or a work based on it, + under Section 2) in object code or executable form under the terms of + Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + + The source code for a work means the preferred form of the work for + making modifications to it. For an executable work, complete source + code means all the source code for all modules it contains, plus any + associated interface definition files, plus the scripts used to + control compilation and installation of the executable. However, as a + special exception, the source code distributed need not include + anything that is normally distributed (in either source or binary + form) with the major components (compiler, kernel, and so on) of the + operating system on which the executable runs, unless that component + itself accompanies the executable. + + If distribution of executable or object code is made by offering + access to copy from a designated place, then offering equivalent + access to copy the source code from the same place counts as + distribution of the source code, even though third parties are not + compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program + except as expressly provided under this License. Any attempt + otherwise to copy, modify, sublicense or distribute the Program is + void, and will automatically terminate your rights under this License. + However, parties who have received copies, or rights, from you under + this License will not have their licenses terminated so long as such + parties remain in full compliance. + + 5. You are not required to accept this License, since you have not + signed it. However, nothing else grants you permission to modify or + distribute the Program or its derivative works. These actions are + prohibited by law if you do not accept this License. Therefore, by + modifying or distributing the Program (or any work based on the + Program), you indicate your acceptance of this License to do so, and + all its terms and conditions for copying, distributing or modifying + the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the + Program), the recipient automatically receives a license from the + original licensor to copy, distribute or modify the Program subject to + these terms and conditions. You may not impose any further + restrictions on the recipients' exercise of the rights granted herein. + You are not responsible for enforcing compliance by third parties to + this License. + + 7. 
If, as a consequence of a court judgment or allegation of patent + infringement or for any other reason (not limited to patent issues), + conditions are imposed on you (whether by court order, agreement or + otherwise) that contradict the conditions of this License, they do not + excuse you from the conditions of this License. If you cannot + distribute so as to satisfy simultaneously your obligations under this + License and any other pertinent obligations, then as a consequence you + may not distribute the Program at all. For example, if a patent + license would not permit royalty-free redistribution of the Program by + all those who receive copies directly or indirectly through you, then + the only way you could satisfy both it and this License would be to + refrain entirely from distribution of the Program. + + If any portion of this section is held invalid or unenforceable under + any particular circumstance, the balance of the section is intended to + apply and the section as a whole is intended to apply in other + circumstances. + + It is not the purpose of this section to induce you to infringe any + patents or other property right claims or to contest validity of any + such claims; this section has the sole purpose of protecting the + integrity of the free software distribution system, which is + implemented by public license practices. Many people have made + generous contributions to the wide range of software distributed + through that system in reliance on consistent application of that + system; it is up to the author/donor to decide if he or she is willing + to distribute software through any other system and a licensee cannot + impose that choice. + + This section is intended to make thoroughly clear what is believed to + be a consequence of the rest of this License. + + 8. If the distribution and/or use of the Program is restricted in + certain countries either by patents or by copyrighted interfaces, the + original copyright holder who places the Program under this License + may add an explicit geographical distribution limitation excluding + those countries, so that distribution is permitted only in or among + countries not thus excluded. In such case, this License incorporates + the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions + of the General Public License from time to time. Such new versions will + be similar in spirit to the present version, but may differ in detail to + address new problems or concerns. + + Each version is given a distinguishing version number. If the Program + specifies a version number of this License which applies to it and "any + later version", you have the option of following the terms and conditions + either of that version or of any later version published by the Free + Software Foundation. If the Program does not specify a version number of + this License, you may choose any version ever published by the Free Software + Foundation. + + 10. If you wish to incorporate parts of the Program into other free + programs whose distribution conditions are different, write to the author + to ask for permission. For software which is copyrighted by the Free + Software Foundation, write to the Free Software Foundation; we sometimes + make exceptions for this. Our decision will be guided by the two goals + of preserving the free status of all derivatives of our free software and + of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. 
BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY + FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN + OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES + PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED + OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS + TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE + PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, + REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING + WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR + REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, + INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING + OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED + TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY + YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER + PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE + POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest + possible use to the public, the best way to achieve this is to make it + free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest + to attach them to the start of each source file to most effectively + convey the exclusion of warranty; and each file should have at least + the "copyright" line and a pointer to where the full notice is found. + + one line to give the program's name and a brief idea of what it does. + Copyright (C) year name of author + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + + + Also add information on how to contact you by electronic and paper mail. + + If the program is interactive, make it output a short notice like this + when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + + The hypothetical commands `show w' and `show c' should show the appropriate + parts of the General Public License. Of course, the commands you use may + be called something other than `show w' and `show c'; they could even be + mouse-clicks or menu items--whatever suits your program. + + You should also get your employer (if you work as a programmer) or your + school, if any, to sign a "copyright disclaimer" for the program, if + necessary. 
Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + signature of Ty Coon, 1 April 1989 + Ty Coon, President of Vice + + This General Public License does not permit incorporating your program into + proprietary programs. If your program is a subroutine library, you may + consider it more useful to permit linking proprietary applications with the + library. If this is what you want to do, use the GNU Library General + Public License instead of this License. + + + +pyrsistent +0.19.3 +MIT License +Copyright (c) 2022 Tobias Gustafsson + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +pytest +7.2.2 +MIT License +The MIT License (MIT) + +Copyright (c) 2004 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +pytest-cov +4.0.0 +MIT License +The MIT License + +Copyright (c) 2010 Meme Dough + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +pytest-django +4.5.2 +BSD License +pytest-django is released under the BSD (3-clause) license +---------------------------------------------------------- +Copyright (c) 2015-2018, pytest-django authors (see AUTHORS file) +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The names of its contributors may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +This version of pytest-django is a fork of pytest_django created by Ben Firshman. +--------------------------------------------------------------------------------- +Copyright (c) 2009, Ben Firshman +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The names of its contributors may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +pytest-xdist +3.0.2 +MIT License + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + +python-dateutil +2.8.2 +Apache Software License; BSD License +Copyright 2017- Paul Ganssle +Copyright 2017- dateutil contributors (see AUTHORS file) + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +The above license applies to all contributions after 2017-12-01, as well as +all contributions that have been re-licensed (see AUTHORS file for the list of +contributors who have re-licensed their code). +-------------------------------------------------------------------------------- +dateutil - Extensions to the standard Python datetime module. + +Copyright (c) 2003-2011 - Gustavo Niemeyer +Copyright (c) 2012-2014 - Tomi Pieviläinen +Copyright (c) 2014-2016 - Yaron de Leeuw +Copyright (c) 2015- - Paul Ganssle +Copyright (c) 2015- - dateutil contributors (see AUTHORS file) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The above BSD License Applies to all code, even that also covered by Apache 2.0. + +python-decouple +3.6 +MIT License +The MIT License + +Copyright (c) 2013 Henrique Bastos + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +python-gnupg +0.5.0 +BSD License +Copyright (c) 2008-2022 by Vinay Sajip. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * The name(s) of the copyright holder(s) may not be used to endorse or + promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +EVENT SHALL THE COPYRIGHT HOLDER(S) BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + + +pytz +2022.7.1 +MIT License +Copyright (c) 2003-2019 Stuart Bishop + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. + + +qrcode +7.3.1 +BSD License +Copyright (c) 2011, Lincoln Loop +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the package name nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +------------------------------------------------------------------------------- + + +Original text and license from the pyqrnative package where this was forked +from (http://code.google.com/p/pyqrnative): + +#Ported from the Javascript library by Sam Curren +# +#QRCode for Javascript +#http://d-project.googlecode.com/svn/trunk/misc/qrcode/js/qrcode.js +# +#Copyright (c) 2009 Kazuhiko Arase +# +#URL: http://www.d-project.com/ +# +#Licensed under the MIT license: +# http://www.opensource.org/licenses/mit-license.php +# +# The word "QR Code" is registered trademark of +# DENSO WAVE INCORPORATED +# http://www.denso-wave.com/qrcode/faqpatent-e.html + + +requests +2.28.2 +Apache Software License + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + +rfc3986 +1.5.0 +Apache Software License +Copyright 2014 Ian Cordasco, Rackspace + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +s3transfer +0.6.0 +Apache Software License + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +six +1.16.0 +MIT License +Copyright (c) 2010-2020 Benjamin Peterson + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +sniffio +1.3.0 +Apache Software License; MIT License +This software is made available under the terms of *either* of the +licenses found in LICENSE.APACHE2 or LICENSE.MIT. Contributions to are +made under the terms of *both* these licenses. + + +sqlparse +0.4.3 +BSD License +Copyright (c) 2016, Andi Albrecht +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the authors nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +tinycss2 +1.2.1 +BSD License +BSD 3-Clause License + +Copyright (c) 2013-2020, Simon Sapin and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +tomli +2.0.1 +MIT License +MIT License + +Copyright (c) 2021 Taneli Hukkinen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +typing_extensions +4.5.0 +Python Software Foundation License +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. 
Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see http://www.opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. + + Release Derived Year Owner GPL- + from compatible? (1) + + 0.9.0 thru 1.2 1991-1995 CWI yes + 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes + 1.6 1.5.2 2000 CNRI no + 2.0 1.6 2000 BeOpen.com no + 1.6.1 1.6 2001 CNRI yes (2) + 2.1 2.0+1.6.1 2001 PSF no + 2.0.1 2.0+1.6.1 2001 PSF yes + 2.1.1 2.1+2.0.1 2001 PSF yes + 2.1.2 2.1.1 2002 PSF yes + 2.1.3 2.1.2 2002 PSF yes + 2.2 and above 2.1.1 2001-now PSF yes + +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. 
In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. 
This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. 
+Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +urllib3 +1.26.15 +MIT License +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +uvicorn +0.20.0 +BSD License +Copyright © 2017-present, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +vine +5.0.0 +BSD License +Copyright (c) 2015-2016 Ask Solem & contributors. All rights reserved. + +Vine is licensed under The BSD License (3 Clause, also known as +the new BSD license). The license is an OSI approved Open Source +license and is GPL-compatible(1). + +The license text can also be found here: +http://www.opensource.org/licenses/BSD-3-Clause + +License +======= + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Ask Solem, nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Ask Solem OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. 
+ +Documentation License +===================== + +The documentation portion of Vine (the rendered contents of the +"docs" directory of a software distribution or checkout) is supplied +under the "Creative Commons Attribution-ShareAlike 4.0 +International" (CC BY-SA 4.0) License as described by +http://creativecommons.org/licenses/by-sa/4.0/ + +Footnotes +========= +(1) A GPL-compatible license makes it possible to + combine Vine with other software that is released + under the GPL, it does not mean that we're distributing + Vine under the GPL license. The BSD license, unlike the GPL, + let you distribute a modified version without making your + changes open source. + + +watchdog +2.2.0 +Apache Software License +Copyright 2011 Yesudeep Mangalapilly +Copyright 2012 Google, Inc & contributors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +weasyprint +57.2 +BSD License +BSD 3-Clause License + +Copyright (c) 2011-2021, Simon Sapin and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +webencodings +0.5.1 +BSD License +UNKNOWN + +whitenoise +6.2.0 +MIT License +The MIT License (MIT) + +Copyright (c) 2013 David Evans + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +wrapt +1.15.0 +BSD License +Copyright (c) 2013-2023, Graham Dumpleton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + + +zipstream-new +1.1.8 +GNU General Public License v3 (GPLv3) + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
+
+
+zopfli
+0.2.2
+Apache Software License
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity.
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2011 Google Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + + +webencodings +0.5.1 +BSD License +Copyright (c) 2012 by Simon Sapin. + +Some rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/api/README.md b/api/README.md new file mode 100644 index 0000000..e462ddd --- /dev/null +++ b/api/README.md @@ -0,0 +1 @@ +# ReportCreator API diff --git a/api/download_fonts.sh b/api/download_fonts.sh new file mode 100644 index 0000000..79599cf --- /dev/null +++ b/api/download_fonts.sh @@ -0,0 +1,32 @@ +#!/bin/bash +set -e + +# Download google fonts +while IFS= read -r fontname; do + FONTNAME_URL=$(echo "${fontname}" | tr " " "+") + FONTNAME_FS=$(echo "${fontname}" | tr "[:upper:]" "[:lower:]" | tr " " "_") + wget https://fonts.google.com/download?family=${FONTNAME_URL} -O /tmp/${FONTNAME_FS}.zip --quiet + mkdir -p /usr/share/fonts/truetype/${FONTNAME_FS}/ + unzip -q /tmp/${FONTNAME_FS}.zip -d /usr/share/fonts/truetype/${FONTNAME_FS}/ + if [[ ${FONTNAME_FS} = 'roboto_serif' ]]; then + mv /usr/share/fonts/truetype/${FONTNAME_FS}/ /tmp/roboto_serif_all/ + mv /tmp/roboto_serif_all/static/RobotoSerif/ /usr/share/fonts/truetype/${FONTNAME_FS}/ + rm -rf /tmp/roboto_serif_all/ + fi + rm -f /tmp/${FONTNAME_FS}.zip +done << EOF +Open Sans +Roboto +Roboto Serif +Lato +Exo +Tinos +Source Code Pro +Roboto Mono +Courier Prime +EOF +# Fonts installed with package manager: +# Noto: Noto Sans, Noto Serif, Noto Mono + +# Update font cache +fc-cache -f diff --git a/api/generate_notice.sh b/api/generate_notice.sh new file mode 100755 index 0000000..0a29c5f --- /dev/null +++ b/api/generate_notice.sh @@ -0,0 +1,69 @@ +#!/bin/bash +set -e +# Any subsequent(*) commands which fail will cause the shell script to exit immediately + +allow_only="MIT" +allow_only="$allow_only;MIT License" +allow_only="$allow_only;BSD License" +allow_only="$allow_only;Apache Software License" +allow_only="$allow_only;GNU General Public License v2 or later (GPLv2+)" +allow_only="$allow_only;GNU General Public License v2 (GPLv2)" +allow_only="$allow_only;GNU General Public License v3 (GPLv3)" +allow_only="$allow_only;GNU Library or Lesser General Public License (LGPL)" +allow_only="$allow_only;GNU Lesser General Public License v2 or later (LGPLv2+)" +allow_only="$allow_only;Mozilla Public License 1.0 (MPL)" 
+allow_only="$allow_only;Mozilla Public License 1.1 (MPL 1.1)" +allow_only="$allow_only;Mozilla Public License 2.0 (MPL 2.0)" +allow_only="$allow_only;Historical Permission Notice and Disclaimer (HPND)" +allow_only="$allow_only;Python Software Foundation License" + +ignore="jsonschema" +ignore="$ignore;webencodings" + + +pip3 install pip-licenses +pip-licenses --allow-only "$allow_only" >/dev/null +pip-licenses -l --no-license-path -f plain-vertical --ignore-packages "$ignore" > NOTICE + + +# Those packages do not include valid license files +webencodings_license='''Copyright (c) 2012 by Simon Sapin. + +Some rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + + * The names of the contributors may not be used to endorse or + promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.''' + + +echo "" >> NOTICE +echo "webencodings" >> NOTICE +version=`pip freeze | grep webencodings | cut -d"=" -f 3` +echo "$version" >> NOTICE +echo "BSD License" >> NOTICE +echo "$webencodings_license" >> NOTICE + diff --git a/api/poetry.lock b/api/poetry.lock new file mode 100644 index 0000000..80bee41 --- /dev/null +++ b/api/poetry.lock @@ -0,0 +1,2595 @@ +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. + +[[package]] +name = "adrf" +version = "0.1.0" +description = "Async support for Django REST framework" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "adrf-0.1.0-py3-none-any.whl", hash = "sha256:76ef3b335668a383e0c1dd42f442dc0c80e135e59a204c459603206b24f0758e"}, + {file = "adrf-0.1.0.tar.gz", hash = "sha256:799e4abca989a64bab9cf25fcab0ae902dec8cbd2d8961258375c8ea68593cd6"}, +] + +[package.dependencies] +django = ">=4.1" +djangorestframework = ">=3.14.0" + +[[package]] +name = "amqp" +version = "5.1.1" +description = "Low-level AMQP client for Python (fork of amqplib)." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, + {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, +] + +[package.dependencies] +vine = ">=5.0.0" + +[[package]] +name = "anyio" +version = "3.6.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16,<0.22)"] + +[[package]] +name = "asgiref" +version = "3.6.0" +description = "ASGI specs, helper code, and adapters" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, + {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, +] + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "attrs" +version = "22.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "authlib" +version = "1.2.0" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "Authlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a"}, + {file = "Authlib-1.2.0.tar.gz", hash = "sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d"}, +] + +[package.dependencies] +cryptography = ">=3.2" + +[[package]] +name = "billiard" +version = "3.6.4.0" +description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, + {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, +] + +[[package]] +name = "boto3" +version = "1.26.110" +description = "The AWS SDK for Python" +category = "main" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "boto3-1.26.110-py3-none-any.whl", hash = "sha256:8972a5e0a04ea6f477c41e390765a46ec7bcffb62f99d4a0774ce70fb87bea59"}, + {file = "boto3-1.26.110.tar.gz", hash = "sha256:97d942d958cac28687187b89ee88ac760e0fa3007094cb1d6b16e241144306f3"}, +] + +[package.dependencies] +botocore = ">=1.29.110,<1.30.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.29.110" +description = "Low-level, data-driven core of boto 3." +category = "main" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "botocore-1.29.110-py3-none-any.whl", hash = "sha256:39879fcc3d263513f9ba92cc5060b5a4dbe54f758a917be29c7a71132e34f399"}, + {file = "botocore-1.29.110.tar.gz", hash = "sha256:9d5054159782b19f27bff3e5a65bc494dc323255e889ea3abec002711a1fb0c0"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.16.9)"] + +[[package]] +name = "brotli" +version = "1.0.9" +description = "Python bindings for the Brotli compression library" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.0.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70"}, + {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b"}, + {file = "Brotli-1.0.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6"}, + {file = "Brotli-1.0.9-cp27-cp27m-win32.whl", hash = "sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa"}, + {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452"}, + {file = "Brotli-1.0.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7"}, + {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031"}, + {file = "Brotli-1.0.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43"}, + {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c"}, + {file = 
"Brotli-1.0.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c"}, + {file = "Brotli-1.0.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0"}, + {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91"}, + {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa"}, + {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"}, + {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"}, + {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"}, + {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a"}, + {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f"}, + {file = "Brotli-1.0.9-cp311-cp311-win32.whl", hash = "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d"}, + {file = "Brotli-1.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679"}, + {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"}, + {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"}, + {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"}, + {file = "Brotli-1.0.9-cp35-cp35m-win32.whl", hash = "sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1"}, + {file = "Brotli-1.0.9-cp35-cp35m-win_amd64.whl", hash = "sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea"}, + {file = "Brotli-1.0.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f"}, + {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4"}, + {file = "Brotli-1.0.9-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a"}, + {file = "Brotli-1.0.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b"}, + {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f"}, + {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6"}, + {file = "Brotli-1.0.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b"}, + {file = "Brotli-1.0.9-cp36-cp36m-win32.whl", hash = "sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14"}, + {file = "Brotli-1.0.9-cp36-cp36m-win_amd64.whl", hash = "sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c"}, + {file = "Brotli-1.0.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126"}, + {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d"}, + {file = "Brotli-1.0.9-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12"}, + {file = "Brotli-1.0.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130"}, + {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a"}, + {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3"}, + {file = "Brotli-1.0.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d"}, + {file = "Brotli-1.0.9-cp37-cp37m-win32.whl", hash = "sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1"}, + {file = "Brotli-1.0.9-cp37-cp37m-win_amd64.whl", hash = "sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5"}, + {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb"}, + {file = "Brotli-1.0.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8"}, + {file = "Brotli-1.0.9-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb"}, + {file = "Brotli-1.0.9-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26"}, + {file = "Brotli-1.0.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c"}, + {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b"}, + {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17"}, + {file = "Brotli-1.0.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649"}, + {file = 
"Brotli-1.0.9-cp38-cp38-win32.whl", hash = "sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429"}, + {file = "Brotli-1.0.9-cp38-cp38-win_amd64.whl", hash = "sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f"}, + {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19"}, + {file = "Brotli-1.0.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7"}, + {file = "Brotli-1.0.9-cp39-cp39-manylinux1_i686.whl", hash = "sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b"}, + {file = "Brotli-1.0.9-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389"}, + {file = "Brotli-1.0.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7"}, + {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806"}, + {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1"}, + {file = "Brotli-1.0.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c"}, + {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"}, + {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755"}, + {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"}, +] + +[[package]] +name = "brotlicffi" +version = "1.0.9.2" +description = "Python CFFI bindings to the Brotli library" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "brotlicffi-1.0.9.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:408ec4359f9763280d5c4e0ad29c51d1240b25fdd18719067e972163b4125b98"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2e4629f7690ded66c8818715c6d4dd6a7ff6a4f10fad6186fe99850f781ce210"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:137c4635edcdf593de5ce9d0daa596bf499591b16b8fca5fd72a490deb54b2ee"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:af8a1b7bcfccf9c41a3c8654994d6a81821fdfe4caddcfe5045bfda936546ca3"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:9078432af4785f35ab3840587eed7fb131e3fc77eb2a739282b649b343c584dd"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7bb913d5bf3b4ce2ec59872711dc9faaff5f320c3c3827cada2d8a7b793a7753"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:16a0c9392a1059e2e62839fbd037d2e7e03c8ae5da65e9746f582464f7fab1bb"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:94d2810efc5723f1447b332223b197466190518a3eeca93b9f357efb5b22c6dc"}, + {file = "brotlicffi-1.0.9.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:9e70f3e20f317d70912b10dbec48b29114d3dbd0e9d88475cb328e6c086f0546"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:586f0ea3c2eed455d5f2330b9ab4a591514c8de0ee53d445645efcfbf053c69f"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux1_i686.whl", hash = "sha256:4454c3baedc277fd6e65f983e3eb8e77f4bc15060f69370a0201746e2edeca81"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:52c1c12dad6eb1d44213a0a76acf5f18f64653bd801300bef5e2f983405bdde5"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:21cd400d24b344c218d8e32b394849e31b7c15784667575dbda9f65c46a64b0a"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:71061f8bc86335b652e442260c4367b782a92c6e295cf5a10eff84c7d19d8cf5"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:15e0db52c56056be6310fc116b3d7c6f34185594e261f23790b2fb6489998363"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-win32.whl", hash = "sha256:551305703d12a2dd1ae43d3dde35dee20b1cb49b5796279d4d34e2c6aec6be4d"}, + {file = "brotlicffi-1.0.9.2-cp35-abi3-win_amd64.whl", hash = "sha256:2be4fb8a7cb482f226af686cd06d2a2cab164ccdf99e460f8e3a5ec9a5337da2"}, + {file = "brotlicffi-1.0.9.2-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:8e7221d8a084d32d15c7b58e0ce0573972375c5038423dbe83f217cfe512e680"}, + {file = "brotlicffi-1.0.9.2-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:75a46bc5ed2753e1648cc211dcb2c1ac66116038766822dc104023f67ff4dfd8"}, + {file = "brotlicffi-1.0.9.2-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1e27c43ef72a278f9739b12b2df80ee72048cd4cbe498f8bbe08aaaa67a5d5c8"}, + {file = "brotlicffi-1.0.9.2-pp27-pypy_73-win32.whl", hash = 
"sha256:feb942814285bdc5e97efc77a04e48283c17dfab9ea082d79c0a7b9e53ef1eab"}, + {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a6208d82c3172eeeb3be83ed4efd5831552c7cd47576468e50fcf0fb23fcf97f"}, + {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:408c810c599786fb806556ff17e844a903884e6370ca400bcec7fa286149f39c"}, + {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:a73099858ee343e8801710a08be8d194f47715ff21e98d92a19ac461058f52d1"}, + {file = "brotlicffi-1.0.9.2-pp36-pypy36_pp73-win32.whl", hash = "sha256:916b790f967a18a595e61f218c252f83718ac91f24157d622cf0fa710cd26ab7"}, + {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba4a00263af40e875ec3d6c7f623cbf8c795b55705da18c64ec36b6bf0848bc5"}, + {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-manylinux1_x86_64.whl", hash = "sha256:df78aa47741122b0d5463f1208b7bb18bc9706dee5152d9f56e0ead4865015cd"}, + {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:9030cd5099252d16bfa4e22659c84a89c102e94f8e81d30764788b72e2d7cfb7"}, + {file = "brotlicffi-1.0.9.2-pp37-pypy37_pp73-win32.whl", hash = "sha256:7e72978f4090a161885b114f87b784f538dcb77dafc6602592c1cf39ae8d243d"}, + {file = "brotlicffi-1.0.9.2.tar.gz", hash = "sha256:0c248a68129d8fc6a217767406c731e498c3e19a7be05ea0a90c3c86637b7d96"}, +] + +[package.dependencies] +cffi = ">=1.0.0" + +[[package]] +name = "celery" +version = "5.2.7" +description = "Distributed Task Queue." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, + {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, +] + +[package.dependencies] +billiard = ">=3.6.4.0,<4.0" +click = ">=8.0.3,<9.0" +click-didyoumean = ">=0.0.3" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.2.3,<6.0" +librabbitmq = {version = ">=1.5.0", optional = true, markers = "extra == \"librabbitmq\""} +pytz = ">=2021.3" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=1.3.2)"] +auth = ["cryptography"] +azureblockblob = ["azure-storage-blob (==12.9.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (<3.21.0)"] +consul = ["python-consul2"] +cosmosdbsql = ["pydocumentdb (==2.3.2)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb"] +django = ["Django (>=1.11)"] +dynamodb = ["boto3 (>=1.9.178)"] +elasticsearch = ["elasticsearch"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=1.5.0)"] +memcache = ["pylibmc"] +mongodb = ["pymongo[srv] (>=3.11.1)"] +msgpack = ["msgpack"] +pymemcache = ["python-memcached"] +pyro = ["pyro4"] +pytest = ["pytest-celery"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +s3 = ["boto3 (>=1.9.125)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem"] +sqlalchemy = ["sqlalchemy"] +sqs = ["kombu[sqs]"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard"] + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = 
"cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = 
"sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-didyoumean" +version = "0.3.0" +description = "Enables git-like *did-you-mean* feature in click" +category = "main" +optional = false +python-versions = ">=3.6.2,<4.0.0" +files = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.2.0" +description = "REPL plugin for Click" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, + {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, +] + +[package.dependencies] +click = "*" +prompt-toolkit = "*" +six = "*" + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.2.3" +description = "Code coverage measurement for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, + {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, + {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, + {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, + {file = 
"coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, + {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, + {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, + {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, + {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, + {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, + {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, + {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, + {file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, + {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, + {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, + {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "40.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "cryptography-40.0.1-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917"}, + {file = "cryptography-40.0.1-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405"}, + {file = "cryptography-40.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356"}, + {file = "cryptography-40.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122"}, + {file = "cryptography-40.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2"}, + {file = "cryptography-40.0.1-cp36-abi3-win32.whl", hash = "sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db"}, + {file = "cryptography-40.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a"}, + {file = "cryptography-40.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778"}, + {file = "cryptography-40.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c"}, + {file = "cryptography-40.0.1.tar.gz", hash = "sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472"}, +] + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] +pep8test = ["black", "check-manifest", "mypy", "ruff"] +sdist = ["setuptools-rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-shard (>=0.1.2)", "pytest-subtests", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] +tox = ["tox"] + +[[package]] +name = 
"cssselect2" +version = "0.7.0" +description = "CSS selectors for Python ElementTree" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cssselect2-0.7.0-py3-none-any.whl", hash = "sha256:fd23a65bfd444595913f02fc71f6b286c29261e354c41d722ca7a261a49b5969"}, + {file = "cssselect2-0.7.0.tar.gz", hash = "sha256:1ccd984dab89fc68955043aca4e1b03e0cf29cad9880f6e28e3ba7a74b14aa5a"}, +] + +[package.dependencies] +tinycss2 = "*" +webencodings = "*" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "debugpy" +version = "1.6.7" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "debugpy-1.6.7-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b3e7ac809b991006ad7f857f016fa92014445085711ef111fdc3f74f66144096"}, + {file = "debugpy-1.6.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3876611d114a18aafef6383695dfc3f1217c98a9168c1aaf1a02b01ec7d8d1e"}, + {file = "debugpy-1.6.7-cp310-cp310-win32.whl", hash = "sha256:33edb4afa85c098c24cc361d72ba7c21bb92f501104514d4ffec1fb36e09c01a"}, + {file = "debugpy-1.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:ed6d5413474e209ba50b1a75b2d9eecf64d41e6e4501977991cdc755dc83ab0f"}, + {file = "debugpy-1.6.7-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:38ed626353e7c63f4b11efad659be04c23de2b0d15efff77b60e4740ea685d07"}, + {file = "debugpy-1.6.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:279d64c408c60431c8ee832dfd9ace7c396984fd7341fa3116aee414e7dcd88d"}, + {file = "debugpy-1.6.7-cp37-cp37m-win32.whl", hash = "sha256:dbe04e7568aa69361a5b4c47b4493d5680bfa3a911d1e105fbea1b1f23f3eb45"}, + {file = "debugpy-1.6.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f90a2d4ad9a035cee7331c06a4cf2245e38bd7c89554fe3b616d90ab8aab89cc"}, + {file = "debugpy-1.6.7-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5224eabbbeddcf1943d4e2821876f3e5d7d383f27390b82da5d9558fd4eb30a9"}, + {file = "debugpy-1.6.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae1123dff5bfe548ba1683eb972329ba6d646c3a80e6b4c06cd1b1dd0205e9b"}, + {file = "debugpy-1.6.7-cp38-cp38-win32.whl", hash = "sha256:9cd10cf338e0907fdcf9eac9087faa30f150ef5445af5a545d307055141dd7a4"}, + {file = "debugpy-1.6.7-cp38-cp38-win_amd64.whl", hash = "sha256:aaf6da50377ff4056c8ed470da24632b42e4087bc826845daad7af211e00faad"}, + {file = "debugpy-1.6.7-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:0679b7e1e3523bd7d7869447ec67b59728675aadfc038550a63a362b63029d2c"}, + {file = "debugpy-1.6.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de86029696e1b3b4d0d49076b9eba606c226e33ae312a57a46dca14ff370894d"}, + {file = "debugpy-1.6.7-cp39-cp39-win32.whl", hash = "sha256:d71b31117779d9a90b745720c0eab54ae1da76d5b38c8026c654f4a066b0130a"}, + {file = "debugpy-1.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:c0ff93ae90a03b06d85b2c529eca51ab15457868a377c4cc40a23ab0e4e552a3"}, + {file = "debugpy-1.6.7-py2.py3-none-any.whl", hash = "sha256:53f7a456bc50706a0eaabecf2d3ce44c4d5010e46dfc65b6b81a518b42866267"}, + {file = "debugpy-1.6.7.zip", hash = "sha256:c4c2f0810fa25323abfdfa36cbbbb24e5c3b1a42cb762782de64439c575d67f2"}, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = 
"deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "django" +version = "4.2" +description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Django-4.2-py3-none-any.whl", hash = "sha256:ad33ed68db9398f5dfb33282704925bce044bef4261cd4fb59e4e7f9ae505a78"}, + {file = "Django-4.2.tar.gz", hash = "sha256:c36e2ab12824e2ac36afa8b2515a70c53c7742f0d6eaefa7311ec379558db997"}, +] + +[package.dependencies] +asgiref = ">=3.6.0,<4" +sqlparse = ">=0.3.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "django-csp" +version = "3.7" +description = "Django Content Security Policy support." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "django_csp-3.7-py2.py3-none-any.whl", hash = "sha256:01443a07723f9a479d498bd7bb63571aaa771e690f64bde515db6cdb76e8041a"}, + {file = "django_csp-3.7.tar.gz", hash = "sha256:01eda02ad3f10261c74131cdc0b5a6a62b7c7ad4fd017fbefb7a14776e0a9727"}, +] + +[package.dependencies] +Django = ">=1.8" + +[package.extras] +jinja2 = ["jinja2 (>=2.9.6)"] +tests = ["jinja2 (>=2.9.6)", "mock (==1.0.1)", "pep8 (==1.4.6)", "pytest (<4.0)", "pytest-django", "pytest-flakes (==1.0.1)", "pytest-pep8 (==1.0.6)", "six (==1.12.0)"] + +[[package]] +name = "django-debug-toolbar" +version = "4.0.0" +description = "A configurable set of panels that display various debug information about the current request/response." +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "django_debug_toolbar-4.0.0-py3-none-any.whl", hash = "sha256:bad339d68520652ddc1580c76f136fcbc3e020fd5ed96510a89a02ec81bb3fb1"}, + {file = "django_debug_toolbar-4.0.0.tar.gz", hash = "sha256:89619f6e0ea1057dca47bfc429ed99b237ef70074dabc065a7faa5f00e1459cf"}, +] + +[package.dependencies] +django = ">=3.2.4" +sqlparse = ">=0.2" + +[[package]] +name = "django-filter" +version = "23.1" +description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "django-filter-23.1.tar.gz", hash = "sha256:dee5dcf2cea4d7f767e271b6d01f767fce7500676d5e5dc58dac8154000b87df"}, + {file = "django_filter-23.1-py3-none-any.whl", hash = "sha256:e3c52ad83c32fb5882125105efb5fea2a1d6a85e7dc64b04ef52edbf14451b6c"}, +] + +[package.dependencies] +Django = ">=3.2" + +[[package]] +name = "django-phonenumber-field" +version = "7.0.2" +description = "An international phone number field for django models." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "django-phonenumber-field-7.0.2.tar.gz", hash = "sha256:de3e47b986b4959949762c16fd8fe26b3e462ef3e5531ed00950bd20c698576a"}, + {file = "django_phonenumber_field-7.0.2-py3-none-any.whl", hash = "sha256:9edad2b2602af25f2aefc73c4cf53eaf7abf9e17d73c1c4372bd3052bebb26f9"}, +] + +[package.dependencies] +Django = ">=3.2" +phonenumberslite = {version = ">=7.0.2", optional = true, markers = "extra == \"phonenumberslite\""} + +[package.extras] +phonenumbers = ["phonenumbers (>=7.0.2)"] +phonenumberslite = ["phonenumberslite (>=7.0.2)"] + +[[package]] +name = "django-storages" +version = "1.13.2" +description = "Support for many storage backends in Django" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "django-storages-1.13.2.tar.gz", hash = "sha256:cbadd15c909ceb7247d4ffc503f12a9bec36999df8d0bef7c31e57177d512688"}, + {file = "django_storages-1.13.2-py3-none-any.whl", hash = "sha256:31dc5a992520be571908c4c40d55d292660ece3a55b8141462b4e719aa38eab3"}, +] + +[package.dependencies] +Django = ">=3.2" + +[package.extras] +azure = ["azure-storage-blob (>=12.0.0)"] +boto3 = ["boto3 (>=1.4.4)"] +dropbox = ["dropbox (>=7.2.1)"] +google = ["google-cloud-storage (>=1.27.0)"] +libcloud = ["apache-libcloud"] +sftp = ["paramiko (>=1.10.0)"] + +[[package]] +name = "djangorestframework" +version = "3.14.0" +description = "Web APIs for Django, made easy." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "djangorestframework-3.14.0-py3-none-any.whl", hash = "sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08"}, + {file = "djangorestframework-3.14.0.tar.gz", hash = "sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8"}, +] + +[package.dependencies] +django = ">=3.0" +pytz = "*" + +[[package]] +name = "drf-nested-routers" +version = "0.93.4" +description = "Nested resources for the Django Rest Framework" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "drf-nested-routers-0.93.4.tar.gz", hash = "sha256:01aa556b8c08608bb74fb34f6ca065a5183f2cda4dc0478192cc17a2581d71b0"}, + {file = "drf_nested_routers-0.93.4-py2.py3-none-any.whl", hash = "sha256:996b77f3f4dfaf64569e7b8f04e3919945f90f95366838ca5b8bed9dd709d6c5"}, +] + +[package.dependencies] +Django = ">=1.11" +djangorestframework = ">=3.6.0" + +[[package]] +name = "elastic-apm" +version = "6.15.1" +description = "The official Python module for Elastic APM" +category = "main" +optional = false +python-versions = "<4,>=3.6" +files = [ + {file = "elastic-apm-6.15.1.tar.gz", hash = "sha256:9bc8ec3bd5ef0a108cdb2250dcb70b700ef09f48f97f494e02becd4c7531a245"}, + {file = "elastic_apm-6.15.1-py2.py3-none-any.whl", hash = "sha256:b704ff61ae985611b1b9846a4cc0ab88bf119c66dad6985ae169c68284aac19c"}, +] + +[package.dependencies] +certifi = "*" +urllib3 = "*" +wrapt = ">=1.14.1" + +[package.extras] +aiohttp = ["aiohttp"] +flask = ["blinker"] +opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] +opentracing = ["opentracing (>=2.0.0)"] +sanic = ["sanic"] +starlette = ["starlette"] +tornado = ["tornado"] + +[[package]] +name = "exceptiongroup" +version = "1.1.1" +description = "Backport of PEP 654 (exception groups)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = 
"exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] + +[package.extras] +testing = ["pre-commit"] + +[[package]] +name = "fido2" +version = "1.1.1" +description = "FIDO2/WebAuthn library for implementing clients and servers." +category = "main" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "fido2-1.1.1-py3-none-any.whl", hash = "sha256:54017b69522b1581e4222443a0b3fff5eb2626f8e773a4a7b955f3e55fb3b4fc"}, + {file = "fido2-1.1.1.tar.gz", hash = "sha256:5dc495ca8c59c1c337383b4b8c314d46b92d5c6fc650e71984c6d7f954079fc3"}, +] + +[package.dependencies] +cryptography = ">=2.6,<35 || >35,<43" + +[package.extras] +pcsc = ["pyscard (>=1.9,<3)"] + +[[package]] +name = "fonttools" +version = "4.39.3" +description = "Tools to manipulate font files" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fonttools-4.39.3-py3-none-any.whl", hash = "sha256:64c0c05c337f826183637570ac5ab49ee220eec66cf50248e8df527edfa95aeb"}, + {file = "fonttools-4.39.3.zip", hash = "sha256:9234b9f57b74e31b192c3fc32ef1a40750a8fbc1cd9837a7b7bfc4ca4a5c51d7"}, +] + +[package.dependencies] +brotli = {version = ">=1.0.1", optional = true, markers = "platform_python_implementation == \"CPython\" and extra == \"woff\""} +brotlicffi = {version = ">=0.8.0", optional = true, markers = "platform_python_implementation != \"CPython\" and extra == \"woff\""} +zopfli = {version = ">=0.1.4", optional = true, markers = "extra == \"woff\""} + +[package.extras] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.0.0)", "xattr", "zopfli (>=0.1.4)"] +graphite = ["lz4 (>=1.7.4.2)"] +interpolatable = ["munkres", "scipy"] +lxml = ["lxml (>=4.0,<5)"] +pathops = ["skia-pathops (>=0.5.0)"] +plot = ["matplotlib"] +repacker = ["uharfbuzz (>=0.23.0)"] +symfont = ["sympy"] +type1 = ["xattr"] +ufo = ["fs (>=2.2.0,<3)"] +unicode = ["unicodedata2 (>=15.0.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] + +[[package]] +name = "greenlet" +version = "2.0.1" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"}, + {file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"}, + {file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"}, + {file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"}, + {file = 
"greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"}, + {file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"}, + {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"}, + {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"}, + {file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"}, + {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"}, + {file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"}, + {file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"}, + {file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"}, + {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"}, + {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"}, + {file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"}, + {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"}, + {file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"}, + {file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"}, + {file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"}, + {file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"}, + {file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"}, + {file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"}, + {file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"}, + {file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"}, + {file = 
"greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"}, + {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"}, + {file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"}, + {file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"}, + {file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"}, + {file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"}, + {file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"}, + {file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"}, + {file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"}, + {file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"}, + {file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"}, + {file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"}, + {file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"}, + {file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"}, + {file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"}, + {file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"}, + {file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"}, + {file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"}, + {file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"}, + {file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"}, + {file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"}, +] + +[package.extras] +docs = ["Sphinx", "docutils (<0.18)"] +test = ["faulthandler", "objgraph", "psutil"] + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["chardet (>=2.2)", "genshi", "lxml"] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml"] + +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jsonschema" +version = "4.17.3" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, + {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "kombu" +version = "5.2.4" +description = "Messaging 
library for Python." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, + {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, +] + +[package.dependencies] +amqp = ">=5.0.9,<6.0.0" +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.0.0)"] +azurestoragequeues = ["azure-storage-queue"] +consul = ["python-consul (>=0.6.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=3.3.0,<3.12.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy"] +sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] + +[[package]] +name = "librabbitmq" +version = "2.0.0" +description = "AMQP Client using the rabbitmq-c library." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "librabbitmq-2.0.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2a8113d3c831808d1d940fdf43e4882636a1efe2864df7ab3bb709a45016b37"}, + {file = "librabbitmq-2.0.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3116e40c02d4285b8dd69834e4cbcb1a89ea534ca9147e865f11d44e7cc56eea"}, + {file = "librabbitmq-2.0.0-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:cd9cc09343b193d7cf2cff6c6a578061863bd986a4bdf38f922e9dc32e15d944"}, + {file = "librabbitmq-2.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:98e355f486964dadae7e8b51c9a60e9aa0653bbe27f6b14542687f305c4c3652"}, + {file = "librabbitmq-2.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5cdfb473573396d43d54cef9e9b4c74fa3d1516da51d04a7b261f6ef4e0bd8be"}, + {file = "librabbitmq-2.0.0.tar.gz", hash = "sha256:ffa2363a860ab5dcc3ce3703247e05e940c73d776c03a3f3f9deaf3cf43bb96c"}, +] + +[package.dependencies] +amqp = ">=1.4.6" +six = ">=1.0.0" + +[[package]] +name = "lorem-text" +version = "2.1" +description = "Dummy lorem ipsum text generator" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "lorem_text-2.1-py2.py3-none-any.whl", hash = "sha256:b512779addd08dcc221460ce428f7911b198f66c5763410f3de6d788eb62ceb3"}, + {file = "lorem_text-2.1.tar.gz", hash = "sha256:b984b5570295084012f5a9385672bab3470965c943b73d8d71a6efcd7a62b5c3"}, +] + +[package.dependencies] +Click = ">=7.0" + +[[package]] +name = "lxml" +version = "4.9.2" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, + {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, + {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, + {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, + {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, + {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, + {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, + {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, + {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, + {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, + {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, + {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, + {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, + {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, + {file = 
"lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, + {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, + {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, + {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, + {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, + {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, + {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, + {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, + {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, + {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, + {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, + {file = 
"lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, + {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, + {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, + {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, + {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, + {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, + {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, + {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, + {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, + {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, + {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, + {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, + {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, + {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, + {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "packaging" +version = "23.0" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] + +[[package]] +name = "phonenumberslite" +version = "8.13.9" +description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "phonenumberslite-8.13.9-py2.py3-none-any.whl", hash = "sha256:20ea42c4fa86fd032200692587cb45bf2028ac5ec1e756d1c4b76b42fa4370bf"}, + {file = "phonenumberslite-8.13.9.tar.gz", hash = "sha256:f5f2333e8a3a2e45917796c258ce1742341c731feb4dd92dab3e773118ab24d0"}, +] + +[[package]] +name = "pikepdf" +version = "7.1.2" +description = "Read and write PDFs with Python, powered by qpdf" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pikepdf-7.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:81950bde71eeaa1fbef72164cf4c78f408250bdb2346e4a69aebe1fe0631c47b"}, + {file = "pikepdf-7.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c7b5c328dda0f0f3de4fbfbf3f1f68a85cb957eed00a4bd2f7683a465dc5b5a"}, + {file = "pikepdf-7.1.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:51a9272db7a468476f74c39c0fa5dc6b501e298c709a4b8df8e4b393929d644a"}, + {file = "pikepdf-7.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21521fc51194869027edbdcaf46cf20396344518b122085133c68ddd1dd770f"}, + {file = "pikepdf-7.1.2-cp310-cp310-win32.whl", hash = "sha256:3d88c0ecbfd0df33144cbe348a765f9a82bcc86a7cf18fb19df0d9eab6186398"}, + {file = "pikepdf-7.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:7c7b8f74b144ee0c384a7b82e34d84da89821e0d3f0cf207c5af039c563dce06"}, + {file = "pikepdf-7.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4dd60ae07332b126a4d814955230f1852fcbd905ff72f1d3dde37ab7be192dfa"}, + {file = "pikepdf-7.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:641beb42fdf82cd15e079dd081ba410b54ea552ea81b884cc98885ac5541f73d"}, + {file = "pikepdf-7.1.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df2a99208cb426d675faaa578a96d6f9b76eee76cb473a267b9ae85078176443"}, + {file = "pikepdf-7.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2400574cf79481683f6ce537ee5d1d7925b71ff4d863a026246cbc34be8aeef0"}, + {file = "pikepdf-7.1.2-cp311-cp311-win32.whl", hash = "sha256:d75dfaf6df6e7394d7865878eaf9f2dca1900ebcf1ab9e681672a12d6c7b329b"}, + {file = "pikepdf-7.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:bb5ec2e06b3085413b69dbf3045a9a05a84d24ed8118221854c5465f6190ab35"}, + {file = "pikepdf-7.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de41f314fa61ab4d2368ab63b7e0f1ad72aab3115cfc90c8e123201fb5c2bcb5"}, + {file = "pikepdf-7.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3af17fac243f6e5fef49c57dc99f858957e4210f1b408e1433aa6be29bc49dfd"}, + {file = "pikepdf-7.1.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4122e2786d8a21eeb47cb9e2eb3a6fd758280e6b3e873844f44e01a3bb5fda1a"}, + {file = "pikepdf-7.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24719a4d1a154afb5fac55920154928f4a68e9c38e30ab0218c632f4fbf448cc"}, + {file = "pikepdf-7.1.2-cp38-cp38-win32.whl", hash = "sha256:966008bbe04ac3f282bc026e8f66903c254c048c01f8a5a06e4f55b4d36605df"}, + {file = "pikepdf-7.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:ca1a654a97d3d0f3340f934420c8c3a5522e43bb97ed25e31f06b618da9d64cd"}, + {file = "pikepdf-7.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17656a15b9ff5868869a79ff40765689a6705d2584e8ab63aff1d0365652b6f1"}, + {file = "pikepdf-7.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e87b6a149e39598451d526493ec16f50b67ea899888ee96c14c1545eb1115c53"}, + {file = "pikepdf-7.1.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8a8d24fef8802899f8a6aa89ab2d119da7c6acfaa978bdfdc76854854680a7f"}, + {file = "pikepdf-7.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0cca81a93b9486d98335b310fd1352ec62197bf9e3bf2774b822d5d862bde41"}, + {file = "pikepdf-7.1.2-cp39-cp39-win32.whl", hash = "sha256:db2e4e37a226aac13f6000b22612a8ccfb5764f34ad2b48c39eb38b3bb7cfb35"}, + {file = "pikepdf-7.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:1b64ae8d9b35713b317d7a4f429c14202ff9a2c5d63c8200b4fc1401e52c7e10"}, + {file = "pikepdf-7.1.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:09e142c484179873249f5c8fc50b8a5e66801feb81fc5111463b36b8396faf13"}, + {file = "pikepdf-7.1.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7cfd1bc95fdcece892b2506a9b9229d82a72845e74aa13c6e9578f767da41d1"}, + {file = "pikepdf-7.1.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e95d872385d7226f48ced00b2f56f1c6fd963d1729fa48baf697f6e98748163"}, + {file = "pikepdf-7.1.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5c5ec0c28b0b01aa5a306fc4be45e9948a4f9699f250c68495e279f02fdee0d0"}, + {file = "pikepdf-7.1.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1937f88458f183346d863cca71d9c4ea31792db38feba331fde8342473654330"}, + {file = "pikepdf-7.1.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98ccba51e3a0575d7ffaedc61b58fc10fca28151e2429825a36461253485f45d"}, + {file = "pikepdf-7.1.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac00e736ee8bc5e59ab7c72498bb895c43cd72bae3fa0cadc163663d578fefd4"}, + {file = "pikepdf-7.1.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e35056389134d8ed7d85dce798f9f4b498221c9b640fad4f8f2f8462aaade512"}, + {file = "pikepdf-7.1.2.tar.gz", hash = "sha256:b177e8437fe8efdfb7a30c57f361cae1bf2054117856459ca6565c94d32cb5b5"}, +] + +[package.dependencies] +deprecation = "*" +lxml = ">=4.8" +packaging = "*" +Pillow = ">=9.0" + +[package.extras] +docs = ["GitPython", "PyGithub", "Sphinx (>=3)", "ipython", "matplotlib", "pybind11", "requests", "setuptools-scm", "sphinx-design", "sphinx-issues", "sphinx-rtd-theme", "tomli"] +mypy = ["lxml-stubs", "types-Pillow", "types-requests", "types-setuptools"] +test = ["attrs (>=20.2.0)", "coverage[toml]", "hypothesis (>=6.36)", "psutil (>=5.9)", "pybind11", "pytest (>=6.2.5)", "pytest-cov (>=3.0.0)", "pytest-timeout (>=2.1.0)", "pytest-xdist (>=2.5.0)", "python-dateutil (>=2.8.1)", "python-xmp-toolkit (>=2.0.1)", "tomli"] + +[[package]] +name = "pillow" +version = "9.5.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Pillow-9.5.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16"}, + {file = "Pillow-9.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062"}, + {file = 
"Pillow-9.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5"}, + {file = "Pillow-9.5.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903"}, + {file = "Pillow-9.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a"}, + {file = "Pillow-9.5.0-cp310-cp310-win32.whl", hash = "sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44"}, + {file = "Pillow-9.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32"}, + {file = "Pillow-9.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99"}, + {file = "Pillow-9.5.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579"}, + {file = "Pillow-9.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296"}, + {file = "Pillow-9.5.0-cp311-cp311-win32.whl", hash = "sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec"}, + {file = "Pillow-9.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4"}, + {file = "Pillow-9.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089"}, + {file = "Pillow-9.5.0-cp312-cp312-win32.whl", hash = "sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb"}, + {file = "Pillow-9.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b"}, + {file = "Pillow-9.5.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78"}, + {file = 
"Pillow-9.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392"}, + {file = "Pillow-9.5.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47"}, + {file = "Pillow-9.5.0-cp37-cp37m-win32.whl", hash = "sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7"}, + {file = "Pillow-9.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597"}, + {file = "Pillow-9.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf"}, + {file = "Pillow-9.5.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96"}, + {file = "Pillow-9.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f"}, + {file = "Pillow-9.5.0-cp38-cp38-win32.whl", hash = "sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc"}, + {file = "Pillow-9.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66"}, + {file = "Pillow-9.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705"}, + {file = "Pillow-9.5.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1"}, + {file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a"}, + 
{file = "Pillow-9.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865"}, + {file = "Pillow-9.5.0-cp39-cp39-win32.whl", hash = "sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964"}, + {file = "Pillow-9.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829"}, + {file = "Pillow-9.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7"}, + {file = "Pillow-9.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799"}, + {file = "Pillow-9.5.0.tar.gz", hash = "sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1"}, +] + +[package.extras] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "pillow-heif" +version = "0.10.1" +description = "Python interface for libheif library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pillow_heif-0.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2e34110c906035f9902bb7dee964384e33b45c4545cee0fc4f78bd06b6cffbe0"}, + {file = "pillow_heif-0.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:9d67655cde69eb76f7b5a3f3b3069998d43c9cd157a1e41997fe165a44614401"}, + {file = "pillow_heif-0.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd3b2bfa20f3af072c1a1fedbdee441b71972969e09efc6b0f9789b540d51899"}, + {file = "pillow_heif-0.10.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:091e43a45b1ed155c65a3a99252ba5d1ea7ba9ba7e9880afa06997533abe4875"}, + {file = "pillow_heif-0.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd01437bca86e61b252a0e730c2181b3dd3bfb57367c0473a8dca6db53be5818"}, + {file = "pillow_heif-0.10.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2229077a834182477cfb8f665c4c42ce9766d90d746d74c7ab6d48945c8a6992"}, + {file = 
"pillow_heif-0.10.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f62617d91e6656535fde6ddb61f413c27e81f2d58eb38201b62982a05a729acd"}, + {file = "pillow_heif-0.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f98a5c77626bfb1dfdc83939fe44eb11ab721edfd4ca516e8e9b8e3c0dcfbe13"}, + {file = "pillow_heif-0.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:2c791917a9e286f3d692f5c162dedf07e65ebab18c4df7ad7a5a109d395aaca9"}, + {file = "pillow_heif-0.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b21d19372d9a1cc22a6e639cc929bc3abae7f701ee7c8b66bad5302f36977eef"}, + {file = "pillow_heif-0.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c57bbb1a1aabb88efa72ba24300a3df733826ed8892d5bbcc8317b4262e95a03"}, + {file = "pillow_heif-0.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d4b04bf35280f7d895ba783c4b7f7e3d0f139c99fd736e1831d2cfe06a41c10"}, + {file = "pillow_heif-0.10.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2722a220d898cbcd1e3d6bcb669a28cfcb240d05f41bcd57d4b78af991b32cc"}, + {file = "pillow_heif-0.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae92c3e9b348e367122b140fd7a744bdb087c551ac00efc2b486a410569d00f"}, + {file = "pillow_heif-0.10.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:400b25a1110ef5dfe394255646bae5318779d2ec4c787792bd5ba72956df628f"}, + {file = "pillow_heif-0.10.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:16db680b312ea684b3b88a3f97b3b122df48e12a057351c3ed1f435dd0a634d2"}, + {file = "pillow_heif-0.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db7363f190faeda67b15cf774fddf6c658a5681abb8b9860dcbc47cc85d668f8"}, + {file = "pillow_heif-0.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:7b84073e2997f34062751e8dd0a644e3e8f6fd952265edfe7ee021531a939018"}, + {file = "pillow_heif-0.10.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ef1c87acea720edf784fa3da77d3292f288de1c9f40e9808f4c6837dd167afc3"}, + {file = "pillow_heif-0.10.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dae1ca05c818abc31bbc259a17554c3dd9faca4d79618f06f0cc2439320c4f58"}, + {file = "pillow_heif-0.10.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dece6099058422ab7a66b713e9fc3ea4e21946a95442c276956825602a0782c"}, + {file = "pillow_heif-0.10.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8173d2843207a1c3265e382e7dcb02d8d5f882b5cd8ab9a1701c5bf47639ae22"}, + {file = "pillow_heif-0.10.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0ed8652a520a46aa936b816bb3fcd445aba5ae6678f444927dcd6e7f831e02db"}, + {file = "pillow_heif-0.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da5c734c9510ccb05f42199bedb6b0f126f9e8447e3bde3ad03f3882817ad08c"}, + {file = "pillow_heif-0.10.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:27c1b4e388fde47f690a0b8e4299a8da57329a35e1924444028865e0efd20430"}, + {file = "pillow_heif-0.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05406e07d6640e122729e249ad6a2bf28c1aabe0dde0a71217ad54c36854e0e9"}, + {file = "pillow_heif-0.10.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffa99da11b0328dc483976d5c4e62cccc75903e0bcc861e3d9fbce2752f0dff5"}, + {file = "pillow_heif-0.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6f4f01006dfa5cfefd1e960763e2f3bd829e0c6e6d8202462fc3f7d0b91dfd"}, + {file = "pillow_heif-0.10.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:ea6cf2255179bb667b75b834845083f23959fc3873c444a15f54cad415e501dd"}, + {file = "pillow_heif-0.10.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bc12fc70de7f59a313678255b9abc7acd4915032cdbdb887a402f1e6c632e95d"}, + {file = "pillow_heif-0.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50cbb535e9b776bd327d7344e22bec1f7457ae587487189a136339cf90952a99"}, + {file = "pillow_heif-0.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:82143407c590122e1d36bf674d7d589d20ed76fac243a65d1704e6b0fbc14dde"}, + {file = "pillow_heif-0.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4bf6abce62e934e33dbd5cf8528c76c746397116a87128b913278554eb840c3b"}, + {file = "pillow_heif-0.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:5909585d1878dfe214a7bc6ae502ce6e1ee99cab88dd0669714c2d524f8509da"}, + {file = "pillow_heif-0.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95c0e83ef5237b18ae5e4adc5e5c9261b23c13704abedf1bbb46cc44d086312a"}, + {file = "pillow_heif-0.10.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:158dc0eabaadb13240d2bc14ce11047a661a4748e56423a5346c4ffa9831e0e3"}, + {file = "pillow_heif-0.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856a4f46a689bc037c0e51b8ceae1e7944907a2c8a3767dd4d72c9f781ed82b7"}, + {file = "pillow_heif-0.10.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41a75fbf044db03d3e5d64c8288b7ea3ba4b9575ff1078f1df814936f15d11b7"}, + {file = "pillow_heif-0.10.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e9745aab7ed2bb0e53548e1e2c906721b0bc76adedeb17e661ec9ccbd8b698fd"}, + {file = "pillow_heif-0.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ade9dbfbc5653fcf345fd8db75fb4fec603b521b1a832f091a809258d2232b5"}, + {file = "pillow_heif-0.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:41610fae8e2494f605b7b5c2508f6c2688227a7cd3f2c71e1fff966fd9476297"}, + {file = "pillow_heif-0.10.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a49c5671f74d8d58e4a0d507a3cdbd37c28693f5ad50b5bed5983a2b693e572a"}, + {file = "pillow_heif-0.10.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de3a2929e509a93981866fb9ec2f313ee349312009ca50ed1ca999c4039c31e1"}, + {file = "pillow_heif-0.10.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e50cab15f2531ea5bdda9b15e5f2d05bf023b607e4322bc600dd18e3783757"}, + {file = "pillow_heif-0.10.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:dc143d3f61b7a7d28f4200be9cdcf0149b5da44511d8faacb4778a9dc264e900"}, + {file = "pillow_heif-0.10.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c57dc8496e59d4d9b8f79e66be148e5c898704b7bbd65531d69352bce2e820f0"}, + {file = "pillow_heif-0.10.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37dd748836c8d5d82ef5395cd8aee523dba5bc0c6a77353baacf7868de41eec3"}, + {file = "pillow_heif-0.10.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28a3872f66d55d74ea4c18f1460ccba1bae20874100331b58dae6bbc240c63a5"}, + {file = "pillow_heif-0.10.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9c6880056df5898cada6f65b5dc6ba8259da1b570491c18da867420f32314512"}, + {file = "pillow_heif-0.10.1.tar.gz", hash = "sha256:af9bd9d8fc189451edb193f321214207bf890d0ac80ac697056def39fec7565d"}, +] + +[package.dependencies] +pillow = ">=8.4.0" + +[package.extras] +dev = ["coverage", "defusedxml", "numpy", "opencv-python (==4.7.0.72)", "packaging", "pre-commit", "pylint", "pympler", "pytest"] 
+docs = ["sphinx (>=4.4)", "sphinx-issues (>=3.0.1)", "sphinx-rtd-theme (>=1.0)"] +tests = ["defusedxml", "numpy", "packaging", "pympler", "pytest"] +tests-min = ["defusedxml", "packaging", "pytest"] + +[[package]] +name = "playwright" +version = "1.32.1" +description = "A high-level API to automate web browsers" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "playwright-1.32.1-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:e2f919e8611f598d6e81bd12ab24c5987955b05fc663c98b862034a955387300"}, + {file = "playwright-1.32.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5dbf28b8256c2f570a66d6a7c04cd0bfb5225e696e01f85cf5aa49e29ea95b42"}, + {file = "playwright-1.32.1-py3-none-macosx_11_0_universal2.whl", hash = "sha256:42473495f8af0279d868cc541d0c6d3733a8adb117253499dae85203104b0824"}, + {file = "playwright-1.32.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:83123330e2913a28d11bb8846f7c81a4736553c80f3e9748d213bcaa24fafe91"}, + {file = "playwright-1.32.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d56a743f7d88a313b25a88422779c64e5d5a95baa805b9dfd1c5785aa01d217d"}, + {file = "playwright-1.32.1-py3-none-win32.whl", hash = "sha256:274bfdd413a979346ce66e99c993c105a123e48da591a65638e5cdf518c90172"}, + {file = "playwright-1.32.1-py3-none-win_amd64.whl", hash = "sha256:32bb5645904b5ba3096a4696c70ce3213eb2310c77273140dc5de14498a84134"}, +] + +[package.dependencies] +greenlet = "2.0.1" +pyee = "9.0.4" + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.38" +description = "Library for building powerful interactive command lines in Python" +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psycopg" +version = "3.1.8" +description = "PostgreSQL database adapter for Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg-3.1.8-py3-none-any.whl", hash = "sha256:b1500c42063abaa01d30b056f0b300826b8dd8d586900586029a294ce74af327"}, + {file = "psycopg-3.1.8.tar.gz", hash = "sha256:59b4a71536b146925513c0234dfd1dc42b81e65d56ce5335dff4813434dbc113"}, +] + +[package.dependencies] +psycopg-binary = {version = ">=3.1.6,<=3.1.8", optional = true, markers = "extra == \"binary\""} +typing-extensions = ">=4.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (>=3.1.6,<=3.1.8)"] +c = ["psycopg-c (>=3.1.6,<=3.1.8)"] +dev = ["black (>=22.3.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=0.990)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = 
["psycopg-pool"] +test = ["mypy (>=0.990)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-asyncio (>=0.17)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + +[[package]] +name = "psycopg-binary" +version = "3.1.8" +description = "PostgreSQL database adapter for Python -- C optimisation distribution" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg_binary-3.1.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f32684b4fc3863190c4b9c141342b2cbdb81632731b9c68e6946d772ba0560f2"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37212244817b3cc7193ee4b5d60765c020ead5e53589c935d249bfb96452878b"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f2563db6e44372f593a76c94452ce476306e0fb508e092f3fab4d9091a9974"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b36fcc67d8b23935ee871a6331c9631ecfdb11452a64f34b8ecb9642de43aec8"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bb9f577a09e799322008e574a1671c5b2645e990f954be2b7dae669e3779750"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac81e68262b03163ca977f34448b4cadbc49db929146406b4706fe2141d76d1"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fbfc9ae4edfb76c14d09bd70d6f399eb935008bbb3bc4cd6a4ab76645ba3443e"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8602836138bc209aa5f9821c8e8439466f151c3ec4fcdbc740697e49cff1b920"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:9cf94411f5a9064cf4ab1066976a7bce44f970f9603a01585c1040465eb312f9"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a8fee8d846f9614331bd764850b4c1363730d36e88e14aa28ec4639318fd2093"}, + {file = "psycopg_binary-3.1.8-cp310-cp310-win_amd64.whl", hash = "sha256:2d5ae85c6037e45862e304d39ec24a24ddebc7d2b5b3601155dddc07c19c0cdc"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17d187743d8ca63d24fa724bfee76e50b6473f1fef998cebcd35348b0d5936de"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3762e73b6743139c5258d8b3a294edb309c691ba4f172c9f272315501390e7c2"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87973d064a72bc2716309381b713f49f57c48100fb1f046943b780a04bc011f6"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f8400d400f64f659a897d1ef67212012524cc44882bd24387515df9bb723364"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f45766ce8e74eb456d8672116e936391e67290c50fd0cc1b41876b61261869b6"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33ecf37c6348232073ea62b0630655479021f855635f72b4170693032993cdaf"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:10b8f1f96f5e8f02a60ba76dab315d3e71cb76c18ff49aa18bbf48a8089c3202"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:58cb0d007768dbccb67783baacf1c4016c7be8a494339a514321edee3d3b787a"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:59d8dbea1bc3dbbc819c0320cb2b641dc362389b096098c62172f49605f58284"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4325cee1641c25719bcf063f7683e909cb8cc9932ace3f8bf20ce112e47ce743"}, + {file = "psycopg_binary-3.1.8-cp311-cp311-win_amd64.whl", hash = "sha256:064502d191d7bc32a48670cc605ce49abcdb5e01e2697ee3fe546cff330fb8ae"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5fd8492931865cc7181169b2dbf472377a5b5808f001e73f5c25b05bb61e9622"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4d1a4ea2ca20f0bc944bc28e4addb80e6a22ac60a85fc7035e57c88e96f3a18"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c27be5ddf4a05146ae7fb8429e9367dad0dc278a7d0e2f5094dd533195c4f8a1"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa8ca48a35be0f9880ed2093c213f07d318fa9389a2b9194196c239e41a77841"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf59e1d06f420930fc4c16a42ed6476c60c83976c82e53012dbca45f009d5978"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cb3013b76cbab4a903f3b9c87f4518335627cb05fd89f9e04520c1743c2b919b"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:db84eaa9e2d13e37a97dcd39d2fe78e0a3052c9aa67b5f0b4f3d346a155f4d21"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:2c3d268cf2dbb79e52a555c2e7b26c6df2d014f3fb918d512ffc25ecc9c54582"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0fe6205af5f63ee6e4816b267bf06add5934a259cddcf7dfdfc8ed738f5127b2"}, + {file = "psycopg_binary-3.1.8-cp37-cp37m-win_amd64.whl", hash = "sha256:f99806a5b9a5ba5cb5f46a0fa0440cd721556e0af09a7cadcc39e27ae9b1807e"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0cc5d5a9b0acbf38e0b4de1c701d235f0cb750ef3de528dedfdbab1a367f2396"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:478ecbb774398e5df6ee365a4d0a77f382a65f140e76720909804255c7801d4a"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b40b56c5b3ffa8481f7bebb08473602ddb8e2e86ba25bf9261ba428eb7887175"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37df8714837d2c701ba4c54462a189b95d1a4439d4d147fb71018560e9a60547"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29a38b48cbec8484d83efea4d1d0707e49a3c51a2273cfbaa3d9ba280d3df7d9"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1a2209ef4df25f4ed8d91924bd4d9c7028d254e61216366c4b894c8a6ea4f88"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:858a794c2d5e984627503581f03cc68cef97ee080993b7b6a0b7b30cb4fac107"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:574c8b7b51e8d5c06f27125fc218d1328c018c0c1ad8f1202033aa6897b8ee99"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e3dc783eedde10f966039ecc5f96f7df25c288ea4f6795d28b990f312c33ff09"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:94f9e7ccbfdba1c4f5de80b615187eb47a351ab64a9123d87aea4bf347c1e1d8"}, + {file = "psycopg_binary-3.1.8-cp38-cp38-win_amd64.whl", hash = "sha256:1425c2cc4cfd4778d9dee578541f11546a93fc2f5c558a0411c94026a1cf94c7"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e68e8b8077cd45dd2683fcd9a384e7672b400e26c0c7d04dac0cf0763c12be78"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:60b22dd46e4e4f678379cf3388468171c2ecea74e90b1332d173ffa8cd83315f"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61a1ccef7e0bf6128a7818c9d22cc850cf7649cee9541e82e4a8c080a734024d"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e7a7b41eba96c7b9648efee57298f1aa0d96e081dea76489f52113536981712"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a161785b1c8e26cd8e8d5436fa39ba2a8af590c17f1741aae11f8076a08485e6"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a978d2bea09265eb6ebcd1b8a3aa05ea4118aa4013cb9669e12a8656975385cd"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:251d2e6dca112dd359c029f422a025d75e78f2f2af4a2aceff506fdc5120f5f9"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a1f052642a54eda53786fa8b72fca2e48ceaf0fc2f3e8709c87694fd7c45ac50"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:73747e6a5dfb05500ff3857f9b9ee50e4f4f663250454d773b98d818545f10fa"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:811d870ca9e97875db92f9b346492c4fa7a9edd74dce3604015dd13389fef46a"}, + {file = "psycopg_binary-3.1.8-cp39-cp39-win_amd64.whl", hash = "sha256:8a0f425171e95379f1fe93b41d67c6dfe85b6b635944facf07ca26ff7fa8ab1d"}, +] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pycryptodomex" +version = "3.17" +description = "Cryptographic library for Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pycryptodomex-3.17-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:12056c38e49d972f9c553a3d598425f8a1c1d35b2e4330f89d5ff1ffb70de041"}, + {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab33c2d9f275e05e235dbca1063753b5346af4a5cac34a51fa0da0d4edfb21d7"}, + {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:caa937ff29d07a665dfcfd7a84f0d4207b2ebf483362fa9054041d67fdfacc20"}, + {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:db23d7341e21b273d2440ec6faf6c8b1ca95c8894da612e165be0b89a8688340"}, + {file = "pycryptodomex-3.17-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:f854c8476512cebe6a8681cc4789e4fcff6019c17baa0fd72b459155dc605ab4"}, + {file = "pycryptodomex-3.17-cp27-cp27m-win32.whl", hash = "sha256:a57e3257bacd719769110f1f70dd901c5b6955e9596ad403af11a3e6e7e3311c"}, + {file = 
"pycryptodomex-3.17-cp27-cp27m-win_amd64.whl", hash = "sha256:d38ab9e53b1c09608ba2d9b8b888f1e75d6f66e2787e437adb1fecbffec6b112"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:3c2516b42437ae6c7a29ef3ddc73c8d4714e7b6df995b76be4695bbe4b3b5cd2"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5c23482860302d0d9883404eaaa54b0615eefa5274f70529703e2c43cc571827"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:7a8dc3ee7a99aae202a4db52de5a08aa4d01831eb403c4d21da04ec2f79810db"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:7cc28dd33f1f3662d6da28ead4f9891035f63f49d30267d3b41194c8778997c8"}, + {file = "pycryptodomex-3.17-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:2d4d395f109faba34067a08de36304e846c791808524614c731431ee048fe70a"}, + {file = "pycryptodomex-3.17-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:55eed98b4150a744920597c81b3965b632038781bab8a08a12ea1d004213c600"}, + {file = "pycryptodomex-3.17-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:7fa0b52df90343fafe319257b31d909be1d2e8852277fb0376ba89d26d2921db"}, + {file = "pycryptodomex-3.17-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78f0ddd4adc64baa39b416f3637aaf99f45acb0bcdc16706f0cc7ebfc6f10109"}, + {file = "pycryptodomex-3.17-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4fa037078e92c7cc49f6789a8bac3de06856740bb2038d05f2d9a2e4b165d59"}, + {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:88b0d5bb87eaf2a31e8a759302b89cf30c97f2f8ca7d83b8c9208abe8acb447a"}, + {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:6feedf4b0e36b395329b4186a805f60f900129cdf0170e120ecabbfcb763995d"}, + {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a6651a07f67c28b6e978d63aa3a3fccea0feefed9a8453af3f7421a758461b7"}, + {file = "pycryptodomex-3.17-cp35-abi3-win32.whl", hash = "sha256:32e764322e902bbfac49ca1446604d2839381bbbdd5a57920c9daaf2e0b778df"}, + {file = "pycryptodomex-3.17-cp35-abi3-win_amd64.whl", hash = "sha256:4b51e826f0a04d832eda0790bbd0665d9bfe73e5a4d8ea93b6a9b38beeebe935"}, + {file = "pycryptodomex-3.17-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:d4cf0128da167562c49b0e034f09e9cedd733997354f2314837c2fa461c87bb1"}, + {file = "pycryptodomex-3.17-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:c92537b596bd5bffb82f8964cabb9fef1bca8a28a9e0a69ffd3ec92a4a7ad41b"}, + {file = "pycryptodomex-3.17-pp27-pypy_73-win32.whl", hash = "sha256:599bb4ae4bbd614ca05f49bd4e672b7a250b80b13ae1238f05fd0f09d87ed80a"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4c4674f4b040321055c596aac926d12f7f6859dfe98cd12f4d9453b43ab6adc8"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a3648025e4ddb72d43addab764336ba2e670c8377dba5dd752e42285440d31"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40e8a11f578bd0851b02719c862d55d3ee18d906c8b68a9c09f8c564d6bb5b92"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:23d83b610bd97704f0cd3acc48d99b76a15c8c1540d8665c94d514a49905bad7"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:fd29d35ac80755e5c0a99d96b44fb9abbd7e871849581ea6a4cb826d24267537"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64b876d57cb894b31056ad8dd6a6ae1099b117ae07a3d39707221133490e5715"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee8bf4fdcad7d66beb744957db8717afc12d176e3fd9c5d106835133881a049b"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c84689c73358dfc23f9fdcff2cb9e7856e65e2ce3b5ed8ff630d4c9bdeb1867b"}, + {file = "pycryptodomex-3.17.tar.gz", hash = "sha256:0af93aad8d62e810247beedef0261c148790c52f3cd33643791cc6396dd217c1"}, +] + +[[package]] +name = "pydyf" +version = "0.6.0" +description = "A low-level PDF generator." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydyf-0.6.0-py3-none-any.whl", hash = "sha256:291802bfb7f784134de27404eb592414b3c56a1290231fe6fb548a2559bc936a"}, + {file = "pydyf-0.6.0.tar.gz", hash = "sha256:b44a38855d7e47b740b3cd31ab63a2f5b9b2793931d50b0ccaed3bb7b86912fc"}, +] + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pillow", "pytest"] + +[[package]] +name = "pyee" +version = "9.0.4" +description = "A port of node.js's EventEmitter to python." +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pyee-9.0.4-py2.py3-none-any.whl", hash = "sha256:9f066570130c554e9cc12de5a9d86f57c7ee47fece163bbdaa3e9c933cfbdfa5"}, + {file = "pyee-9.0.4.tar.gz", hash = "sha256:2770c4928abc721f46b705e6a72b0c59480c4a69c9a83ca0b00bb994f1ea4b32"}, +] + +[package.dependencies] +typing-extensions = "*" + +[[package]] +name = "pyotp" +version = "2.8.0" +description = "Python One Time Password Library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyotp-2.8.0-py3-none-any.whl", hash = "sha256:889d037fdde6accad28531fc62a790f089e5dfd5b638773e9ee004cce074a2e5"}, + {file = "pyotp-2.8.0.tar.gz", hash = "sha256:c2f5e17d9da92d8ec1f7de6331ab08116b9115adbabcba6e208d46fc49a98c5a"}, +] + +[[package]] +name = "pyphen" +version = "0.14.0" +description = "Pure Python module to hyphenate text" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyphen-0.14.0-py3-none-any.whl", hash = "sha256:414c9355958ca3c6a3ff233f65678c245b8ecb56418fb291e2b93499d61cd510"}, + {file = "pyphen-0.14.0.tar.gz", hash = "sha256:596c8b3be1c1a70411ba5f6517d9ccfe3083c758ae2b94a45f2707346d8e66fa"}, +] + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "pypng" +version = "0.20220715.0" +description = "Pure Python library for saving and loading PNG images" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pypng-0.20220715.0-py3-none-any.whl", hash = "sha256:4a43e969b8f5aaafb2a415536c1a8ec7e341cd6a3f957fd5b5f32a4cfeed902c"}, + {file = "pypng-0.20220715.0.tar.gz", hash = "sha256:739c433ba96f078315de54c0db975aee537cbc3e1d0ae4ed9aab0ca1e427e2c1"}, +] + +[[package]] +name = "pyrsistent" +version = "0.19.3" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = 
"pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, +] + +[[package]] +name = "pytest" +version = "7.3.0" +description = "pytest: simple powerful testing with Python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.3.0-py3-none-any.whl", hash = "sha256:933051fa1bfbd38a21e73c3960cebdad4cf59483ddba7696c48509727e17f201"}, + {file = "pytest-7.3.0.tar.gz", hash = "sha256:58ecc27ebf0ea643ebfdf7fb1249335da761a00c9f955bcd922349bcb68ee57d"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.0.0" +description = "Pytest plugin for measuring coverage." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-django" +version = "4.5.2" +description = "A Django plugin for pytest." 
+category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"}, + {file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"}, +] + +[package.dependencies] +pytest = ">=5.4.0" + +[package.extras] +docs = ["sphinx", "sphinx-rtd-theme"] +testing = ["Django", "django-configurations (>=2.0)"] + +[[package]] +name = "pytest-xdist" +version = "3.2.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-xdist-3.2.1.tar.gz", hash = "sha256:1849bd98d8b242b948e472db7478e090bf3361912a8fed87992ed94085f54727"}, + {file = "pytest_xdist-3.2.1-py3-none-any.whl", hash = "sha256:37290d161638a20b672401deef1cba812d110ac27e35d213f091d15b8beb40c9"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-decouple" +version = "3.8" +description = "Strict separation of settings from code." 
+category = "main" +optional = false +python-versions = "*" +files = [ + {file = "python-decouple-3.8.tar.gz", hash = "sha256:ba6e2657d4f376ecc46f77a3a615e058d93ba5e465c01bbe57289bfb7cce680f"}, + {file = "python_decouple-3.8-py3-none-any.whl", hash = "sha256:d0d45340815b25f4de59c974b855bb38d03151d81b037d9e3f463b0c9f8cbd66"}, +] + +[[package]] +name = "python-gnupg" +version = "0.5.0" +description = "A wrapper for the Gnu Privacy Guard (GPG or GnuPG)" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "python-gnupg-0.5.0.tar.gz", hash = "sha256:70758e387fc0e0c4badbcb394f61acbe68b34970a8fed7e0f7c89469fe17912a"}, + {file = "python_gnupg-0.5.0-py2.py3-none-any.whl", hash = "sha256:345723a03e67b82aba0ea8ae2328b2e4a3906fbe2c18c4082285c3b01068f270"}, +] + +[[package]] +name = "pytz" +version = "2023.3" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] + +[[package]] +name = "qrcode" +version = "7.4.2" +description = "QR Code image generator" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "qrcode-7.4.2-py3-none-any.whl", hash = "sha256:581dca7a029bcb2deef5d01068e39093e80ef00b4a61098a2182eac59d01643a"}, + {file = "qrcode-7.4.2.tar.gz", hash = "sha256:9dd969454827e127dbd93696b20747239e6d540e082937c90f14ac95b30f5845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +pillow = {version = ">=9.1.0", optional = true, markers = "extra == \"pil\""} +pypng = "*" +typing-extensions = "*" + +[package.extras] +all = ["pillow (>=9.1.0)", "pytest", "pytest-cov", "tox", "zest.releaser[recommended]"] +dev = ["pytest", "pytest-cov", "tox"] +maintainer = ["zest.releaser[recommended]"] +pil = ["pillow (>=9.1.0)"] +test = ["coverage", "pytest"] + +[[package]] +name = "requests" +version = "2.28.2" +description = "Python HTTP for Humans." 
+category = "main" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "s3transfer" +version = "0.6.0" +description = "An Amazon S3 Transfer Manager" +category = "main" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"}, + {file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"}, +] + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "setuptools" +version = "67.6.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, + {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "sqlparse" +version = "0.4.3" +description = "A non-validating SQL parser." +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sqlparse-0.4.3-py3-none-any.whl", hash = "sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34"}, + {file = "sqlparse-0.4.3.tar.gz", hash = "sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268"}, +] + +[[package]] +name = "tinycss2" +version = "1.2.1" +description = "A tiny CSS parser" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tinycss2-1.2.1-py3-none-any.whl", hash = "sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847"}, + {file = "tinycss2-1.2.1.tar.gz", hash = "sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "urllib3" +version = "1.26.15" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "uvicorn" +version = "0.21.1" +description = "The lightning-fast ASGI server." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uvicorn-0.21.1-py3-none-any.whl", hash = "sha256:e47cac98a6da10cd41e6fd036d472c6f58ede6c5dbee3dbee3ef7a100ed97742"}, + {file = "uvicorn-0.21.1.tar.gz", hash = "sha256:0fac9cb342ba099e0d582966005f3fdba5b0290579fed4a6266dc702ca7bb032"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, + {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, +] + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = 
"watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = "wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] + +[[package]] +name = "weasyprint" +version = "57.2" +description = "The Awesome Document Factory" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "weasyprint-57.2-py3-none-any.whl", hash = "sha256:685692e7a2b4aec21010a61c1e8f1d6cd611dd46509fa7068fcca3c8e59a94f5"}, + {file = "weasyprint-57.2.tar.gz", hash = "sha256:b8e9ef2dcbcfbc82e99215acfd68f947b2b5f1999b5b156db75fabe380ba7e9a"}, +] + +[package.dependencies] +cffi = ">=0.6" +cssselect2 = ">=0.1" +fonttools = {version = ">=4.0.0", extras = ["woff"]} +html5lib = ">=1.1" +Pillow = ">=9.1.0" +pydyf = ">=0.5.0" +Pyphen = ">=0.9.1" +tinycss2 = ">=1.0.0" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["flake8", "isort", "pytest"] + +[[package]] +name = "webencodings" +version 
= "0.5.1" +description = "Character encoding aliases for legacy web content" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "whitenoise" +version = "6.4.0" +description = "Radically simplified static file serving for WSGI applications" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "whitenoise-6.4.0-py3-none-any.whl", hash = "sha256:599dc6ca57e48929dfeffb2e8e187879bfe2aed0d49ca419577005b7f2cc930b"}, + {file = "whitenoise-6.4.0.tar.gz", hash = "sha256:a02d6660ad161ff17e3042653c8e3f5ecbb2a2481a006bde125b9efb9a30113a"}, +] + +[package.extras] +brotli = ["Brotli"] + +[[package]] +name = "wrapt" +version = "1.15.0" +description = "Module for decorators, wrappers and monkey patching." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wrapt-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46"}, + {file = "wrapt-1.15.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e"}, + {file = "wrapt-1.15.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923"}, + {file = "wrapt-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7"}, + {file = "wrapt-1.15.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90"}, + {file = "wrapt-1.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975"}, + {file = "wrapt-1.15.0-cp310-cp310-win32.whl", hash = "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1"}, + {file = "wrapt-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7"}, + {file = "wrapt-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e"}, + {file = "wrapt-1.15.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92"}, + {file = "wrapt-1.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98"}, + {file = "wrapt-1.15.0-cp311-cp311-win32.whl", hash = "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416"}, + {file = "wrapt-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb"}, + {file = "wrapt-1.15.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248"}, + {file = "wrapt-1.15.0-cp35-cp35m-win32.whl", hash = "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559"}, + {file = "wrapt-1.15.0-cp35-cp35m-win_amd64.whl", hash = "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639"}, + {file = "wrapt-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364"}, + {file = "wrapt-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418"}, + {file = "wrapt-1.15.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2"}, + {file = "wrapt-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1"}, + {file = "wrapt-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420"}, + {file = "wrapt-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e"}, + {file = "wrapt-1.15.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034"}, + {file = "wrapt-1.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653"}, + {file = "wrapt-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0"}, + {file = "wrapt-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145"}, + {file = "wrapt-1.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b"}, + {file = "wrapt-1.15.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094"}, + {file = "wrapt-1.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7"}, + {file = "wrapt-1.15.0-cp38-cp38-win32.whl", hash = 
"sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b"}, + {file = "wrapt-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86"}, + {file = "wrapt-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc"}, + {file = "wrapt-1.15.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8"}, + {file = "wrapt-1.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9"}, + {file = "wrapt-1.15.0-cp39-cp39-win32.whl", hash = "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff"}, + {file = "wrapt-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6"}, + {file = "wrapt-1.15.0-py3-none-any.whl", hash = "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640"}, + {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, +] + +[[package]] +name = "zipstream-new" +version = "1.1.8" +description = "Zipfile generator that takes input files as well as streams" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "zipstream-new-1.1.8.tar.gz", hash = "sha256:b031fe181b94e51678389d26b174bc76382605a078d7d5d8f5beae083f111c76"}, + {file = "zipstream_new-1.1.8-py3-none-any.whl", hash = "sha256:0662eb3ebe764fa168a5883cd8819ef83b94bd9e39955537188459d2264a7f60"}, +] + +[[package]] +name = "zopfli" +version = "0.2.2" +description = "Zopfli module for python" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zopfli-0.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e340851bbdea91408e6713748b4082c2e464a80eef9f9a69ff5a20e5e008cace"}, + {file = "zopfli-0.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:093a58fdf1e592f01233fc16900ceb69f27f19b347deb49544df96d912664f6d"}, + {file = "zopfli-0.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:bd7b174fef2366723f57d16f3e8d157f9cbb53b1c555e2a1f99b6290de94ca28"}, + {file = "zopfli-0.2.2-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a712fdc3dab61037fab549ff72539b7968ffda567e5460aa2518e40a13b4dd38"}, + {file = "zopfli-0.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02a0c37826c0b28454865fdf664d54627fe8d90fac6f7325b5215719e8be09ca"}, + {file = "zopfli-0.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:20b02b5c9f1cfbcfc154e54981d1b9f9581ca1f54ece39c6aed52f7166a6f081"}, + {file = "zopfli-0.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01e82e6e31cfcb2eb7e3d6d72d0a498d150e3c3112cae3b5ab88ca3efedbc162"}, + {file = "zopfli-0.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c2e6d0618e1ffc27a1eaf66662f96e0bc8a4c1926fc139a0f544b93a1e1b451"}, + {file = "zopfli-0.2.2-cp310-cp310-win32.whl", hash = "sha256:e0014bd1b9703c9cdfa7f88bc793600aee5f858dd2f18105b49a70e66b9f1b1d"}, + {file = "zopfli-0.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:13487519e6ee8ed36c4a197d146d8ae60d418172d85342d3cdd28f38f905a705"}, + {file = "zopfli-0.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fa589e4d2b54d95447cb79a6053050fc7218f61594085ca54672cb045ba0f7f8"}, + {file = "zopfli-0.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd661f0894a4e4d78ce4c07e2625b0fd17ae172040ce57c5e1c32316a16727c9"}, + {file = "zopfli-0.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2137d64470469c825713aac486aacc9e2c46e300b92cb39ae47f4024b86b2e"}, + {file = "zopfli-0.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69411d85ed25ea25f480410048b397abc4c98562ce3533ecc3ce65358acc52dd"}, + {file = "zopfli-0.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed09efbcdc8bce5b5ff052ffd1edabdabd7a43e340ee63f8d5e81644dc50110f"}, + {file = "zopfli-0.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9de02f057ed153c9f523e72a366b8f48e2634c9f867e7109232415efe11d36c2"}, + {file = "zopfli-0.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2bafc105065fae35bd96100a5901a7d816f1904eb732d94b6d46cf480ead581b"}, + {file = "zopfli-0.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:898daa330577101aab03806231e9b29990ebaa34f275d9df2045d0551edd1e87"}, + {file = "zopfli-0.2.2-cp311-cp311-win32.whl", hash = "sha256:b5b2e2ac397a71772fbbdc5b31fa8257e46f2a1e718e5c17c08db3dac7c739e4"}, + {file = "zopfli-0.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:259f15d65e554b16a6086bfe96dd7bd175467eb3d024b9dbce41323b5861a285"}, + {file = "zopfli-0.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f6f62330a3999522282d0cc6370682d86985ac66edc2799f5934e309d8d615f1"}, + {file = "zopfli-0.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e05a2506e8a8d44835a11d5f1c296035d65d0f7053f77730ce99066acaf09af"}, + {file = "zopfli-0.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:573ae7e1cb4f0c9a248c203440950b24b213c13b5169e169a884c777ad9054e4"}, + {file = "zopfli-0.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:47d9ec1ca32240fae8b9b41e90d6483f4d0f2946de4785f54f4f57afe83040be"}, + {file = "zopfli-0.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:da3d682956e447f61ad23f66f49f20f189d12b15857a2e524497793ae54027c4"}, + {file = "zopfli-0.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:58ddab571a77988bc585e1a6fa46f9848b45880fa74bc832b135cbc22d22a619"}, + {file = "zopfli-0.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:00a66579f2e663cd7eabad71f5b114abf442f4816fdaf251b4b495aa9d016a67"}, + {file = "zopfli-0.2.2-cp37-cp37m-win32.whl", hash = "sha256:c49e29739508a7142fa1437256a7bf631926e70e68ca50a6bd62ee4e80050acc"}, + {file = "zopfli-0.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8d6d02e1a962995c380411cc4ec81d1f4fc60c293764f8acd859eb12bfdf7190"}, + {file = 
"zopfli-0.2.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a568f09aa932a04073a4147e2db5db2adfccd864326477d58d4ffc80550531c7"}, + {file = "zopfli-0.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c27af5f9a6538891af7257e104a37affbe26383fc0bd57b52c05fe2f45292dc9"}, + {file = "zopfli-0.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aea70d124ff9c0a33078f1451dfa2dd29eba53ea0627acb88783a19f0692044"}, + {file = "zopfli-0.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3b58455a9d23f6d45f2686891d7bec916132aed335052459bbed36a2b9437c1d"}, + {file = "zopfli-0.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7146c58c5ff604e7798d4c015c0ca8da53128ca29d0f1bccb48c785953451cd4"}, + {file = "zopfli-0.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81c2c1216814a4f2f9abcd49fd4b70f05266d3621ef3b21e4b1b7bf535876fc1"}, + {file = "zopfli-0.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468c4317aca9411b576a27f6f26217bdd30e04fdfc420d3d7e8b6f1fef4e9886"}, + {file = "zopfli-0.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:691d4e0fd04e85ee5f59e019ed0da16d8f544904d3879a34986722d87a90c536"}, + {file = "zopfli-0.2.2-cp38-cp38-win32.whl", hash = "sha256:2b4b5ae717dc2c164d9fae6134eac285915aaef77723f8cf9765555ac926f6d0"}, + {file = "zopfli-0.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c9d444b26317f3c40909d555f9c611ef8bcac6edf016af7709a32ad5848b481d"}, + {file = "zopfli-0.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:db004eb8ee7aab9c86647b92e1e570edb6fec9bd384a7a4f24e1f6529db34ac3"}, + {file = "zopfli-0.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a989893b20381be266a2385f4a1b77316e0df4258ee048bb190c2e426e39cbc8"}, + {file = "zopfli-0.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1689ced6f6ebf674281d85c143529232aa039c4e8d814bf3b425f1793bfdeb4"}, + {file = "zopfli-0.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fcc34fd420ec5750f9981db43ee9a4f2e2bfabdc52128b243fca1fd9b99e13d"}, + {file = "zopfli-0.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:33c876d311c5edc700ccf75a22d03dcda1efa85b43f733913a99b5f3d1eb4ea7"}, + {file = "zopfli-0.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3df7ae869dcb8e0bb3292e6ab041d16323af37d87c8dca1dde7b2fe5cb6b7cf7"}, + {file = "zopfli-0.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4cbc6192bf24425c757281c7c864012e51d29095771f805ea3040702c10c3d7a"}, + {file = "zopfli-0.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8563e639534201a14c109c54965f8a71574d8cf525a0a521d310e044d81fece9"}, + {file = "zopfli-0.2.2-cp39-cp39-win32.whl", hash = "sha256:4b471e3f58bd7b77cfc7a29b28a10c094ea4cd9ee14c54fbc4f1150680aac68c"}, + {file = "zopfli-0.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:1e3aefca003cbb41a6dcdd61f920c807eea99d0196aff488f02275c3b3c400a9"}, + {file = "zopfli-0.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:17694cfda43fb2af18b571bfc60426fb67d7701d75cc1f0e634ad0a19ffaebdd"}, + {file = "zopfli-0.2.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71eafbe6ce975f77a5247bf44fdfdb78e846a76a3391de4d75cc68ea74542048"}, + {file = "zopfli-0.2.2-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a85d500cfa06f127e441e90804556a3872ea329e065d2f0ee97922d03afc9885"}, + {file = "zopfli-0.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:4205bb3aea31f22cd52bd1a9c298944591bfd9b6f92ede0af99127750b27eb3b"}, + {file = "zopfli-0.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ec845584fcdc10763d869b40b742fe0e2684adf3ca275ec997b9447ef5fe3ad9"}, + {file = "zopfli-0.2.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1360df0d423c897164a3344ed6635f7fd098cb4ce59c6d45b4275b93727d57f6"}, + {file = "zopfli-0.2.2-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:537da300842f06470c036d6d7e7fc9e63713735ee0b96ee97a750d1ec0399639"}, + {file = "zopfli-0.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2e5b7874dfe228715569940561cdc0485ed8cbfd2c76eebc4e54719e0c9cc494"}, + {file = "zopfli-0.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8c1b316a5eed59a9a49a886aeeaf3b7233627a1013b10f230817870278e15789"}, + {file = "zopfli-0.2.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ce7cbe8f6fff013aa695d5d92ac2b1fd46fd012858109fdde9824759b566685"}, + {file = "zopfli-0.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5e81fed8ac2d71832177ab06385f032cc3a37eec76537d105b1018b7fef0ff"}, + {file = "zopfli-0.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ea855a740ee766c872cbf84abdcc1b6a51b5dbdeb6ace995f36c934b3846467"}, + {file = "zopfli-0.2.2.zip", hash = "sha256:2d49db7540d9991976af464ebc1b9ed12988c04d90691bcb51dc4a373a9e2afc"}, +] + +[package.extras] +test = ["pytest"] + +[metadata] +lock-version = "2.0" +python-versions = "~3.10" +content-hash = "ff64a49ae635ff9df41bd009b770a32a005be2d9d7cf7a420f25257c35c25efb" diff --git a/api/pyproject.toml b/api/pyproject.toml new file mode 100644 index 0000000..81292c5 --- /dev/null +++ b/api/pyproject.toml @@ -0,0 +1,61 @@ +[tool.poetry] +name = "reportcreator-api" +version = "0.1.0" +description = "Pentest report creator" +authors = [] +packages = [{include = "reportcreator_api"}] + +[tool.poetry.dependencies] +python = "~3.10" +django = "4.2" +djangorestframework = "3.14.0" +adrf = "0.1.0" +# check weasyprint performance before updating: https://kozea.github.io/WeasyPerf/ +# do not update to 58.* until text spacing and footnote-call issues are fixed +weasyprint = "57.2" + +django-phonenumber-field = { extras = ["phonenumberslite"], version = "^7.0.0" } +django-csp = "^3.7" +django-storages = "^1.13.2" +drf-nested-routers = "^0.93.4" +django-filter = "^23.1" + +psycopg = { extras = ["binary"], version = "^3.1.8" } +gunicorn = "^20.1.0" +uvicorn = "^0.21.1" +whitenoise = "^6.4.0" +brotli = "^1.0.9" +requests = "^2.28.2" +httpx = "^0.23.3" + +jsonschema = "^4.17.3" +python-decouple = "^3.8" +pycryptodomex = "^3.17" +pyotp = "^2.8.0" +qrcode = { extras = ["pil"], version = "^7.4.2" } +fido2 = "^1.1.1" +authlib = "^1.2.0" +python-gnupg = "^0.5.0" + +lorem-text = "^2.1" +zipstream-new = "^1.1.8" +boto3 = "^1.26.5" +pillow-heif = "^0.10.1" +playwright = "^1.32.1" +pikepdf = "^7.1.2" +celery = { extras = ["librabbitmq"], version = "^5.2.7" } + +django-debug-toolbar = "^4.0.0" +debugpy = "^1.6.7" +watchdog = "^3.0.0" +pytest-django = "^4.5.2" +pytest-xdist = "^3.2.1" +pytest-cov = "^4.0.0" +elastic-apm = "^6.15.1" + + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + diff --git a/api/requirements.txt b/api/requirements.txt new file mode 100644 index 0000000..ed64eb6 --- /dev/null +++ b/api/requirements.txt @@ -0,0 +1,1183 @@ +adrf==0.1.0 ; python_version >= "3.10" and 
python_version < "4.0" \ + --hash=sha256:76ef3b335668a383e0c1dd42f442dc0c80e135e59a204c459603206b24f0758e \ + --hash=sha256:799e4abca989a64bab9cf25fcab0ae902dec8cbd2d8961258375c8ea68593cd6 +amqp==5.1.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2 \ + --hash=sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359 +anyio==3.6.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421 \ + --hash=sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3 +asgiref==3.6.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac \ + --hash=sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506 +attrs==22.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836 \ + --hash=sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99 +authlib==1.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:4ddf4fd6cfa75c9a460b361d4bd9dac71ffda0be879dbe4292a02e92349ad55a \ + --hash=sha256:4fa3e80883a5915ef9f5bc28630564bc4ed5b5af39812a3ff130ec76bd631e9d +billiard==3.6.4.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547 \ + --hash=sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b +boto3==1.26.5 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:7b6fb7b0346c239b53ad5e5fdf5eeef3c6452186d91239beeb66f106531cb2c3 \ + --hash=sha256:cb4eca34b6e13e4ead46a68f66759feaae6bf5e97362b2c979b7b9f1d203715e +botocore==1.29.94 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01b9e066b9eea719ee852e91841b92c7371f6bd388cf6186b5d55508e0f7fa1b \ + --hash=sha256:3748b79e6fc95c19d890aa7439a53b9d468a4c4918439b2ba5cc3c13bfaff817 +brotli==1.0.9 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019 \ + --hash=sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df \ + --hash=sha256:12effe280b8ebfd389022aa65114e30407540ccb89b177d3fbc9a4f177c4bd5d \ + --hash=sha256:160c78292e98d21e73a4cc7f76a234390e516afcd982fa17e1422f7c6a9ce9c8 \ + --hash=sha256:16d528a45c2e1909c2798f27f7bf0a3feec1dc9e50948e738b961618e38b6a7b \ + --hash=sha256:19598ecddd8a212aedb1ffa15763dd52a388518c4550e615aed88dc3753c0f0c \ + --hash=sha256:1c48472a6ba3b113452355b9af0a60da5c2ae60477f8feda8346f8fd48e3e87c \ + --hash=sha256:268fe94547ba25b58ebc724680609c8ee3e5a843202e9a381f6f9c5e8bdb5c70 \ + --hash=sha256:269a5743a393c65db46a7bb982644c67ecba4b8d91b392403ad8a861ba6f495f \ + --hash=sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181 \ + --hash=sha256:29d1d350178e5225397e28ea1b7aca3648fcbab546d20e7475805437bfb0a130 \ + --hash=sha256:2aad0e0baa04517741c9bb5b07586c642302e5fb3e75319cb62087bd0995ab19 \ + --hash=sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be \ + --hash=sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be \ + --hash=sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a \ + --hash=sha256:3496fc835370da351d37cada4cf744039616a6db7d13c430035e901443a34daa \ + 
--hash=sha256:35a3edbe18e876e596553c4007a087f8bcfd538f19bc116917b3c7522fca0429 \ + --hash=sha256:3b78a24b5fd13c03ee2b7b86290ed20efdc95da75a3557cc06811764d5ad1126 \ + --hash=sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7 \ + --hash=sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad \ + --hash=sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679 \ + --hash=sha256:40d15c79f42e0a2c72892bf407979febd9cf91f36f495ffb333d1d04cebb34e4 \ + --hash=sha256:44bb8ff420c1d19d91d79d8c3574b8954288bdff0273bf788954064d260d7ab0 \ + --hash=sha256:4688c1e42968ba52e57d8670ad2306fe92e0169c6f3af0089be75bbac0c64a3b \ + --hash=sha256:495ba7e49c2db22b046a53b469bbecea802efce200dffb69b93dd47397edc9b6 \ + --hash=sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438 \ + --hash=sha256:503fa6af7da9f4b5780bb7e4cbe0c639b010f12be85d02c99452825dd0feef3f \ + --hash=sha256:56d027eace784738457437df7331965473f2c0da2c70e1a1f6fdbae5402e0389 \ + --hash=sha256:5913a1177fc36e30fcf6dc868ce23b0453952c78c04c266d3149b3d39e1410d6 \ + --hash=sha256:5b6ef7d9f9c38292df3690fe3e302b5b530999fa90014853dcd0d6902fb59f26 \ + --hash=sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337 \ + --hash=sha256:5cb1e18167792d7d21e21365d7650b72d5081ed476123ff7b8cac7f45189c0c7 \ + --hash=sha256:61a7ee1f13ab913897dac7da44a73c6d44d48a4adff42a5701e3239791c96e14 \ + --hash=sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2 \ + --hash=sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430 \ + --hash=sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296 \ + --hash=sha256:6c772d6c0a79ac0f414a9f8947cc407e119b8598de7621f39cacadae3cf57d12 \ + --hash=sha256:6d847b14f7ea89f6ad3c9e3901d1bc4835f6b390a9c71df999b0162d9bb1e20f \ + --hash=sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7 \ + --hash=sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d \ + --hash=sha256:7bbff90b63328013e1e8cb50650ae0b9bac54ffb4be6104378490193cd60f85a \ + --hash=sha256:7cb81373984cc0e4682f31bc3d6be9026006d96eecd07ea49aafb06897746452 \ + --hash=sha256:7ee83d3e3a024a9618e5be64648d6d11c37047ac48adff25f12fa4226cf23d1c \ + --hash=sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761 \ + --hash=sha256:85f7912459c67eaab2fb854ed2bc1cc25772b300545fe7ed2dc03954da638649 \ + --hash=sha256:87fdccbb6bb589095f413b1e05734ba492c962b4a45a13ff3408fa44ffe6479b \ + --hash=sha256:88c63a1b55f352b02c6ffd24b15ead9fc0e8bf781dbe070213039324922a2eea \ + --hash=sha256:8a674ac10e0a87b683f4fa2b6fa41090edfd686a6524bd8dedbd6138b309175c \ + --hash=sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f \ + --hash=sha256:93130612b837103e15ac3f9cbacb4613f9e348b58b3aad53721d92e57f96d46a \ + --hash=sha256:9744a863b489c79a73aba014df554b0e7a0fc44ef3f8a0ef2a52919c7d155031 \ + --hash=sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267 \ + --hash=sha256:97f715cf371b16ac88b8c19da00029804e20e25f30d80203417255d239f228b5 \ + --hash=sha256:9bf919756d25e4114ace16a8ce91eb340eb57a08e2c6950c3cebcbe3dff2a5e7 \ + --hash=sha256:9d12cf2851759b8de8ca5fde36a59c08210a97ffca0eb94c532ce7b17c6a3d1d \ + --hash=sha256:9ed4c92a0665002ff8ea852353aeb60d9141eb04109e88928026d3c8a9e5433c \ + --hash=sha256:a72661af47119a80d82fa583b554095308d6a4c356b2a554fdc2799bc19f2a43 \ + --hash=sha256:afde17ae04d90fbe53afb628f7f2d4ca022797aa093e809de5c3cf276f61bbfa \ + 
--hash=sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde \ + --hash=sha256:b336c5e9cf03c7be40c47b5fd694c43c9f1358a80ba384a21969e0b4e66a9b17 \ + --hash=sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f \ + --hash=sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8 \ + --hash=sha256:b663f1e02de5d0573610756398e44c130add0eb9a3fc912a09665332942a2efb \ + --hash=sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb \ + --hash=sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d \ + --hash=sha256:c2415d9d082152460f2bd4e382a1e85aed233abc92db5a3880da2257dc7daf7b \ + --hash=sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4 \ + --hash=sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755 \ + --hash=sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a \ + --hash=sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d \ + --hash=sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a \ + --hash=sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3 \ + --hash=sha256:db844eb158a87ccab83e868a762ea8024ae27337fc7ddcbfcddd157f841fdfe7 \ + --hash=sha256:defed7ea5f218a9f2336301e6fd379f55c655bea65ba2476346340a0ce6f74a1 \ + --hash=sha256:e16eb9541f3dd1a3e92b89005e37b1257b157b7256df0e36bd7b33b50be73bcb \ + --hash=sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a \ + --hash=sha256:e23281b9a08ec338469268f98f194658abfb13658ee98e2b7f85ee9dd06caa91 \ + --hash=sha256:e2d9e1cbc1b25e22000328702b014227737756f4b5bf5c485ac1d8091ada078b \ + --hash=sha256:e48f4234f2469ed012a98f4b7874e7f7e173c167bed4934912a29e03167cf6b1 \ + --hash=sha256:e4c4e92c14a57c9bd4cb4be678c25369bf7a092d55fd0866f759e425b9660806 \ + --hash=sha256:ec1947eabbaf8e0531e8e899fc1d9876c179fc518989461f5d24e2223395a9e3 \ + --hash=sha256:f909bbbc433048b499cb9db9e713b5d8d949e8c109a2a548502fb9aa8630f0b1 +brotlicffi==1.0.9.2 ; platform_python_implementation != "CPython" and python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0c248a68129d8fc6a217767406c731e498c3e19a7be05ea0a90c3c86637b7d96 \ + --hash=sha256:137c4635edcdf593de5ce9d0daa596bf499591b16b8fca5fd72a490deb54b2ee \ + --hash=sha256:15e0db52c56056be6310fc116b3d7c6f34185594e261f23790b2fb6489998363 \ + --hash=sha256:16a0c9392a1059e2e62839fbd037d2e7e03c8ae5da65e9746f582464f7fab1bb \ + --hash=sha256:1e27c43ef72a278f9739b12b2df80ee72048cd4cbe498f8bbe08aaaa67a5d5c8 \ + --hash=sha256:21cd400d24b344c218d8e32b394849e31b7c15784667575dbda9f65c46a64b0a \ + --hash=sha256:2be4fb8a7cb482f226af686cd06d2a2cab164ccdf99e460f8e3a5ec9a5337da2 \ + --hash=sha256:2e4629f7690ded66c8818715c6d4dd6a7ff6a4f10fad6186fe99850f781ce210 \ + --hash=sha256:408c810c599786fb806556ff17e844a903884e6370ca400bcec7fa286149f39c \ + --hash=sha256:408ec4359f9763280d5c4e0ad29c51d1240b25fdd18719067e972163b4125b98 \ + --hash=sha256:4454c3baedc277fd6e65f983e3eb8e77f4bc15060f69370a0201746e2edeca81 \ + --hash=sha256:52c1c12dad6eb1d44213a0a76acf5f18f64653bd801300bef5e2f983405bdde5 \ + --hash=sha256:551305703d12a2dd1ae43d3dde35dee20b1cb49b5796279d4d34e2c6aec6be4d \ + --hash=sha256:586f0ea3c2eed455d5f2330b9ab4a591514c8de0ee53d445645efcfbf053c69f \ + --hash=sha256:71061f8bc86335b652e442260c4367b782a92c6e295cf5a10eff84c7d19d8cf5 \ + --hash=sha256:75a46bc5ed2753e1648cc211dcb2c1ac66116038766822dc104023f67ff4dfd8 \ + --hash=sha256:7bb913d5bf3b4ce2ec59872711dc9faaff5f320c3c3827cada2d8a7b793a7753 \ + 
--hash=sha256:7e72978f4090a161885b114f87b784f538dcb77dafc6602592c1cf39ae8d243d \ + --hash=sha256:8e7221d8a084d32d15c7b58e0ce0573972375c5038423dbe83f217cfe512e680 \ + --hash=sha256:9030cd5099252d16bfa4e22659c84a89c102e94f8e81d30764788b72e2d7cfb7 \ + --hash=sha256:9078432af4785f35ab3840587eed7fb131e3fc77eb2a739282b649b343c584dd \ + --hash=sha256:916b790f967a18a595e61f218c252f83718ac91f24157d622cf0fa710cd26ab7 \ + --hash=sha256:94d2810efc5723f1447b332223b197466190518a3eeca93b9f357efb5b22c6dc \ + --hash=sha256:9e70f3e20f317d70912b10dbec48b29114d3dbd0e9d88475cb328e6c086f0546 \ + --hash=sha256:a6208d82c3172eeeb3be83ed4efd5831552c7cd47576468e50fcf0fb23fcf97f \ + --hash=sha256:a73099858ee343e8801710a08be8d194f47715ff21e98d92a19ac461058f52d1 \ + --hash=sha256:af8a1b7bcfccf9c41a3c8654994d6a81821fdfe4caddcfe5045bfda936546ca3 \ + --hash=sha256:ba4a00263af40e875ec3d6c7f623cbf8c795b55705da18c64ec36b6bf0848bc5 \ + --hash=sha256:df78aa47741122b0d5463f1208b7bb18bc9706dee5152d9f56e0ead4865015cd \ + --hash=sha256:feb942814285bdc5e97efc77a04e48283c17dfab9ea082d79c0a7b9e53ef1eab +celery[librabbitmq]==5.2.7 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14 \ + --hash=sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d +certifi==2022.12.7 ; python_version >= "3.10" and python_version < "4" \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +cffi==1.15.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + 
--hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +charset-normalizer==3.1.0 ; python_version >= "3.10" and python_version < "4" \ + 
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \ + --hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \ + --hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \ + --hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \ + --hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \ + --hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \ + --hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \ + --hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \ + --hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \ + --hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \ + --hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \ + --hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \ + --hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \ + --hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \ + --hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \ + --hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \ + --hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \ + --hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \ + --hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \ + --hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \ + --hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \ + --hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \ + --hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \ + --hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \ + --hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \ + --hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \ + --hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \ + --hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \ + --hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \ + --hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \ + --hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \ + --hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \ + --hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \ + --hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \ + --hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \ + --hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \ + --hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \ + --hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \ + --hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \ + --hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \ + --hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \ + --hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \ + 
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \ + --hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \ + --hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \ + --hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \ + --hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \ + --hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \ + --hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \ + --hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \ + --hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \ + --hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \ + --hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \ + --hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \ + --hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \ + --hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \ + --hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \ + --hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \ + --hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \ + --hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \ + --hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \ + --hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \ + --hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \ + --hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \ + --hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \ + --hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \ + --hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \ + --hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \ + --hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \ + --hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \ + --hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \ + --hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \ + --hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \ + --hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \ + --hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab +click-didyoumean==0.3.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667 \ + --hash=sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035 +click-plugins==1.1.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b \ + --hash=sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8 +click-repl==0.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b \ + --hash=sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8 +click==8.1.3 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 +colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" or python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 +coverage[toml]==7.2.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d \ + --hash=sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4 \ + --hash=sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e \ + --hash=sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab \ + --hash=sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90 \ + --hash=sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6 \ + --hash=sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731 \ + --hash=sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540 \ + --hash=sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2 \ + --hash=sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292 \ + --hash=sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5 \ + --hash=sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b \ + --hash=sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2 \ + --hash=sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0 \ + --hash=sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57 \ + --hash=sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3 \ + --hash=sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140 \ + --hash=sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84 \ + --hash=sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988 \ + --hash=sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67 \ + --hash=sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d \ + --hash=sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2 \ + --hash=sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5 \ + --hash=sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9 \ + --hash=sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8 \ + --hash=sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd \ + --hash=sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6 \ + --hash=sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be \ + --hash=sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88 \ + --hash=sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25 \ + --hash=sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137 \ + --hash=sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968 \ + --hash=sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9 \ + --hash=sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef \ + --hash=sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54 \ + 
--hash=sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512 \ + --hash=sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005 \ + --hash=sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f \ + --hash=sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149 \ + --hash=sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d \ + --hash=sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8 \ + --hash=sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7 \ + --hash=sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5 \ + --hash=sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016 \ + --hash=sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69 \ + --hash=sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212 \ + --hash=sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc \ + --hash=sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8 \ + --hash=sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d \ + --hash=sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd \ + --hash=sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169 +cryptography==39.0.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1 \ + --hash=sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7 \ + --hash=sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06 \ + --hash=sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84 \ + --hash=sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915 \ + --hash=sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074 \ + --hash=sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5 \ + --hash=sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3 \ + --hash=sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9 \ + --hash=sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3 \ + --hash=sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011 \ + --hash=sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536 \ + --hash=sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a \ + --hash=sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f \ + --hash=sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480 \ + --hash=sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac \ + --hash=sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0 \ + --hash=sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108 \ + --hash=sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828 \ + --hash=sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354 \ + --hash=sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612 \ + --hash=sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3 \ + --hash=sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97 +cssselect2==0.7.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1ccd984dab89fc68955043aca4e1b03e0cf29cad9880f6e28e3ba7a74b14aa5a \ + --hash=sha256:fd23a65bfd444595913f02fc71f6b286c29261e354c41d722ca7a261a49b5969 
+debugpy==1.6.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:34d2cdd3a7c87302ba5322b86e79c32c2115be396f3f09ca13306d8a04fe0f16 \ + --hash=sha256:3c9f985944a30cfc9ae4306ac6a27b9c31dba72ca943214dad4a0ab3840f6161 \ + --hash=sha256:4e255982552b0edfe3a6264438dbd62d404baa6556a81a88f9420d3ed79b06ae \ + --hash=sha256:5ad571a36cec137ae6ed951d0ff75b5e092e9af6683da084753231150cbc5b25 \ + --hash=sha256:6efc30325b68e451118b795eff6fe8488253ca3958251d5158106d9c87581bc6 \ + --hash=sha256:7c302095a81be0d5c19f6529b600bac971440db3e226dce85347cc27e6a61908 \ + --hash=sha256:84c39940a0cac410bf6aa4db00ba174f973eef521fbe9dd058e26bcabad89c4f \ + --hash=sha256:86d784b72c5411c833af1cd45b83d80c252b77c3bfdb43db17c441d772f4c734 \ + --hash=sha256:adcfea5ea06d55d505375995e150c06445e2b20cd12885bcae566148c076636b \ + --hash=sha256:b8deaeb779699350deeed835322730a3efec170b88927debc9ba07a1a38e2585 \ + --hash=sha256:c4b2bd5c245eeb49824bf7e539f95fb17f9a756186e51c3e513e32999d8846f3 \ + --hash=sha256:c4cd6f37e3c168080d61d698390dfe2cd9e74ebf80b448069822a15dadcda57d \ + --hash=sha256:cca23cb6161ac89698d629d892520327dd1be9321c0960e610bbcb807232b45d \ + --hash=sha256:d5c814596a170a0a58fa6fad74947e30bfd7e192a5d2d7bd6a12156c2899e13a \ + --hash=sha256:daadab4403427abd090eccb38d8901afd8b393e01fd243048fab3f1d7132abb4 \ + --hash=sha256:dda8652520eae3945833e061cbe2993ad94a0b545aebd62e4e6b80ee616c76b2 \ + --hash=sha256:e8922090514a890eec99cfb991bab872dd2e353ebb793164d5f01c362b9a40bf \ + --hash=sha256:fc233a0160f3b117b20216f1169e7211b83235e3cd6749bcdd8dbb72177030c7 +deprecation==2.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff \ + --hash=sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a +django-csp==3.7 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01443a07723f9a479d498bd7bb63571aaa771e690f64bde515db6cdb76e8041a \ + --hash=sha256:01eda02ad3f10261c74131cdc0b5a6a62b7c7ad4fd017fbefb7a14776e0a9727 +django-debug-toolbar==3.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:89a52128309eb4da12738801ff0c202d2ff8730d1c3225fac6acf630c303e661 \ + --hash=sha256:97965f2630692de316ea0c1ca5bfa81660d7ba13146dbc6be2059cf55b35d0e5 +django-filter==22.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:ed429e34760127e3520a67f415bec4c905d4649fbe45d0d6da37e6ff5e0287eb \ + --hash=sha256:ed473b76e84f7e83b2511bb2050c3efb36d135207d0128dfe3ae4b36e3594ba5 +django-phonenumber-field[phonenumberslite]==7.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:dab78094e83f4b1276effca9903e6728e940d055b00cc8589ad5b8a22cb6a03b \ + --hash=sha256:f1aaee276b18a8f0bf503d52eda183965ca164a6379c1e70f73718bcc8a91345 +django-storages==1.13.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:31dc5a992520be571908c4c40d55d292660ece3a55b8141462b4e719aa38eab3 \ + --hash=sha256:cbadd15c909ceb7247d4ffc503f12a9bec36999df8d0bef7c31e57177d512688 +django==4.2b1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:33e3b3b80924dae3e6d4b5e697eaee724d5a35c1a430df44b1d72c802657992f \ + --hash=sha256:9bf13063a882a9b0f7028c4cdc32ea36fe104491cd7720859117990933f9c589 +djangorestframework-simplejwt==5.2.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:4c0d2e2513e12587d93501ac091781684a216c3ee614eb3b5a10586aef5ca845 \ + --hash=sha256:d27d4bcac2c6394f678dea8b4d0d511c6e18a7f2eb8aaeeb8a7de601aeb77c42 
+djangorestframework==3.14.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:579a333e6256b09489cbe0a067e66abe55c6595d8926be6b99423786334350c8 \ + --hash=sha256:eb63f58c9f218e1a7d064d17a70751f528ed4e1d35547fdade9aaf4cd103fd08 +drf-nested-routers==0.93.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01aa556b8c08608bb74fb34f6ca065a5183f2cda4dc0478192cc17a2581d71b0 \ + --hash=sha256:996b77f3f4dfaf64569e7b8f04e3919945f90f95366838ca5b8bed9dd709d6c5 +elastic-apm==6.14.0 ; python_version >= "3.10" and python_version < "4" \ + --hash=sha256:3f57a67595aabddc99593ce091b28de9cb279f907c20a38e10e00de23211b36d \ + --hash=sha256:6e81d8f555ed443ef33617f6857037d821768cb96a07ec21be20aa3df732a2c7 +exceptiongroup==1.1.1 ; python_version >= "3.10" and python_version < "3.11" \ + --hash=sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e \ + --hash=sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785 +execnet==1.9.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5 \ + --hash=sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142 +fido2==1.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2b4b4e620c2100442c20678e0e951ad6d1efb3ba5ca8ebb720c4c8d543293674 \ + --hash=sha256:5fe2f6fc06429d9229660bcd60a6573b185706cf454876224a906f83f056d207 +fonttools[woff]==4.39.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:85245aa2fd4cf502a643c9a9a2b5a393703e150a6eaacc3e0e84bb448053f061 \ + --hash=sha256:e2d9f10337c9e3b17f9bce17a60a16a885a7d23b59b7f45ce07ea643e5580439 +greenlet==2.0.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9 \ + --hash=sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9 \ + --hash=sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581 \ + --hash=sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26 \ + --hash=sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd \ + --hash=sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2 \ + --hash=sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a \ + --hash=sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82 \ + --hash=sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a \ + --hash=sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce \ + --hash=sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f \ + --hash=sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524 \ + --hash=sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48 \ + --hash=sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77 \ + --hash=sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928 \ + --hash=sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e \ + --hash=sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67 \ + --hash=sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9 \ + --hash=sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68 \ + --hash=sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd \ + --hash=sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515 \ + 
--hash=sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5 \ + --hash=sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39 \ + --hash=sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94 \ + --hash=sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92 \ + --hash=sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e \ + --hash=sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726 \ + --hash=sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd \ + --hash=sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5 \ + --hash=sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764 \ + --hash=sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955 \ + --hash=sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608 \ + --hash=sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148 \ + --hash=sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51 \ + --hash=sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9 \ + --hash=sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d \ + --hash=sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c \ + --hash=sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72 \ + --hash=sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1 \ + --hash=sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2 \ + --hash=sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23 \ + --hash=sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb \ + --hash=sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6 \ + --hash=sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19 \ + --hash=sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45 \ + --hash=sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000 \ + --hash=sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da \ + --hash=sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617 \ + --hash=sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963 \ + --hash=sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7 \ + --hash=sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d \ + --hash=sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d \ + --hash=sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0 \ + --hash=sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243 \ + --hash=sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce \ + --hash=sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6 \ + --hash=sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a \ + --hash=sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1 \ + --hash=sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f \ + --hash=sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd +gunicorn==20.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e \ + --hash=sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8 +h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ + --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 +html5lib==1.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d \ + --hash=sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f +httpcore==0.16.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb \ + --hash=sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0 +httpx==0.23.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9 \ + --hash=sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6 +idna==3.4 ; python_version >= "3.10" and python_version < "4" \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +iniconfig==2.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 +jmespath==1.0.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ + --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe +jsonschema==4.17.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:5bfcf2bca16a087ade17e02b282d34af7ccd749ef76241e7f9bd7c0cb8a9424d \ + --hash=sha256:f660066c3966db7d6daeaea8a75e0b68237a48e51cf49882087757bb59916248 +kombu==5.2.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610 \ + --hash=sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4 +librabbitmq==2.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:3116e40c02d4285b8dd69834e4cbcb1a89ea534ca9147e865f11d44e7cc56eea \ + --hash=sha256:5cdfb473573396d43d54cef9e9b4c74fa3d1516da51d04a7b261f6ef4e0bd8be \ + --hash=sha256:98e355f486964dadae7e8b51c9a60e9aa0653bbe27f6b14542687f305c4c3652 \ + --hash=sha256:c2a8113d3c831808d1d940fdf43e4882636a1efe2864df7ab3bb709a45016b37 \ + --hash=sha256:cd9cc09343b193d7cf2cff6c6a578061863bd986a4bdf38f922e9dc32e15d944 \ + --hash=sha256:ffa2363a860ab5dcc3ce3703247e05e940c73d776c03a3f3f9deaf3cf43bb96c +lorem-text==2.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:b512779addd08dcc221460ce428f7911b198f66c5763410f3de6d788eb62ceb3 \ + --hash=sha256:b984b5570295084012f5a9385672bab3470965c943b73d8d71a6efcd7a62b5c3 +lxml==4.9.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7 \ + --hash=sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726 \ + --hash=sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03 \ + --hash=sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140 \ + --hash=sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a \ + --hash=sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05 \ + --hash=sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03 \ + 
--hash=sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419 \ + --hash=sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4 \ + --hash=sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e \ + --hash=sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67 \ + --hash=sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50 \ + --hash=sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894 \ + --hash=sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf \ + --hash=sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947 \ + --hash=sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1 \ + --hash=sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd \ + --hash=sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3 \ + --hash=sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92 \ + --hash=sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3 \ + --hash=sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457 \ + --hash=sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74 \ + --hash=sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf \ + --hash=sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1 \ + --hash=sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4 \ + --hash=sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975 \ + --hash=sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5 \ + --hash=sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe \ + --hash=sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7 \ + --hash=sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1 \ + --hash=sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2 \ + --hash=sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409 \ + --hash=sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f \ + --hash=sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f \ + --hash=sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5 \ + --hash=sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24 \ + --hash=sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e \ + --hash=sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4 \ + --hash=sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a \ + --hash=sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c \ + --hash=sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de \ + --hash=sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f \ + --hash=sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b \ + --hash=sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5 \ + --hash=sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7 \ + --hash=sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a \ + --hash=sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c \ + --hash=sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9 \ + --hash=sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e \ + 
--hash=sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab \ + --hash=sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941 \ + --hash=sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5 \ + --hash=sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45 \ + --hash=sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7 \ + --hash=sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892 \ + --hash=sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746 \ + --hash=sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c \ + --hash=sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53 \ + --hash=sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe \ + --hash=sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184 \ + --hash=sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38 \ + --hash=sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df \ + --hash=sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9 \ + --hash=sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b \ + --hash=sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2 \ + --hash=sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0 \ + --hash=sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda \ + --hash=sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b \ + --hash=sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5 \ + --hash=sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380 \ + --hash=sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33 \ + --hash=sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8 \ + --hash=sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1 \ + --hash=sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889 \ + --hash=sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9 \ + --hash=sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f \ + --hash=sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c +packaging==23.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \ + --hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97 +phonenumberslite==8.13.7 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:12457107a037c8918c8a181a2bfaae563452b73c619fefb2fdd2b4c7a162200e \ + --hash=sha256:286865b8e4761e0b9904b9b925f881111c52b0b75db10a504a3de0be698b7f5b +pikepdf==6.0.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:060bb152b5a0b08d0673add4a1f95aafe95f3414e5ff0f500284e189f9a94569 \ + --hash=sha256:0e1509a2858b9170424a9259cd56ecd264bce1f5a1fbc98a5cb58f555768315b \ + --hash=sha256:1536a7b898dc59b8a1f0cd02dbf42981275634252f51b0a863afe494f8f6da76 \ + --hash=sha256:1bdd1e64644209f27ea0672e26277bd8ea291e1403d6bcfb8394d2713db86d8b \ + --hash=sha256:1e2bbcd4b1228c55f6e12f4154b01e467926554be8505aa6791acbe5073956b1 \ + --hash=sha256:1e77e68089a394105de192f84ac27d51b4f0c10c7cefc445a56b3254ccdacadd \ + --hash=sha256:23e988d57873c64e2592784fbffc18da21a99c12e0173fc8a9ce8b56024dfce7 \ + --hash=sha256:3132f7c5d94030334d5fbca0a7834d92b9793a286445b85557ea33d030a7caad \ + 
--hash=sha256:339453b8f15fef409aa7384891b171045c31eba903aec7912dcd16738b6837f2 \ + --hash=sha256:373ece605001244e8d552b563ebfe61e10da2895e0d94beae2d7c707bcd91f83 \ + --hash=sha256:3fa1ae3bfe5afd20d62bc841a2ff410df1dbf6412d146cbe5768ea7f40a0df28 \ + --hash=sha256:464cef231aafa9f598ef0a6d0a21a8829272b674a19091b8d12caedd829dbb5b \ + --hash=sha256:5afb4a49df2eec8cbc0055b409f78be840f6fb33bc28357c5990e39cd6d99239 \ + --hash=sha256:5b73bebe892bc886f6336c856126e1b6e094c2a87fbed2a9acceef90c4283cb5 \ + --hash=sha256:6b94bcc6263326643f2bed2f53ea158875cf31db8d3588a49b73abad70d52614 \ + --hash=sha256:701229af97308a38f3b5a7e79a5fa9ae0a74c96f217ece100b4b65f98741b76d \ + --hash=sha256:78a2cb9f332a5330041acca870918a4d29653a4686efd9f59dcc03f73dae7778 \ + --hash=sha256:7e51aa472d0013ec877e6554d111fc3b9d71d499cb9f51a34a699b6f7480b0b5 \ + --hash=sha256:7e86dd5ba44bbbb7b4ab420191f681b05fcaf646c17ea7ed5d39e61296e4df12 \ + --hash=sha256:82c10168ff18d9e13e55f3f15ccd7c0c548e1be81a864d88dbe9d25014bf834d \ + --hash=sha256:89c51487a7d6e125b56b677c06235646c9ac15e08dbd7399d0ee422f289fcf62 \ + --hash=sha256:8cdbb3615964cb06e2f79b1613abbe0529a405eed4961d19d2f63b79d4f5f1fc \ + --hash=sha256:a2385a1ca24f51b0607888f632e8fa2fb0fc47b548eadf76c4456e259572480b \ + --hash=sha256:b0c4397ab58736e1d0d130a47aa68d8b84167a7af9ef9825f1cf54a4df417ea3 \ + --hash=sha256:c65097ad7eee9152b484bb61dd87e465373bc0189b72b6a64325d75a808b0295 \ + --hash=sha256:c76d3156a46882ee31db74a48533fec6e41937d27cc1944efaff752e5121d56d \ + --hash=sha256:d705ff78881bbbedf5db54342a19778cd2f141697548cd8fb418024efdb10bbd \ + --hash=sha256:d924a9eccd400b667c10ece6a72a8203b80093e66f620e5f2c9f5e4b288d1e12 \ + --hash=sha256:dc6e5f7d2776fae35b5bee5114e4842ee4bd449557ad0e0feba52963fe88acf5 \ + --hash=sha256:e1060bc736c5080d6367351e8367f25171d572f8f01c54a72ffd8337ef512144 \ + --hash=sha256:e135991897b072725b7c5b332dc4156cc3cfd65b1c82fcf2dbd373d4e306ade0 \ + --hash=sha256:e2ce76a3fd0ec5af184daad67cd37aa51d3745f676f55f10dc086cd528d7d3b7 \ + --hash=sha256:e6d7099b8e9a7e0e7375da8f462162604d4e8cd2cf9c1a647d79b80e5e242d27 \ + --hash=sha256:ea5c8ff7adff37ddac6813fd43a74a5236afed96585e908304a7522300e59e92 \ + --hash=sha256:eab4823438a95b849699de02cb8780d4853b0c9de89548dd1ff4e3327371d7d4 \ + --hash=sha256:f13671b1ded6b03baecfabbaec81b7bca38f5624cf0b74981eb1a61761e352b7 +pillow-heif==0.7.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01dbea3cf8f99bc1f074b05a2bf701ccc70858503f9d8bb017806cfc0c1d2511 \ + --hash=sha256:05832c17036f4457430ea095d69c2b9b910a4c3f29be879413dd807fd9d3f2a5 \ + --hash=sha256:092c65ec024a6fd795477a7b28cd8f04bef20b62c3f9f92e3c65031b3fbee7f7 \ + --hash=sha256:0c00d560b61c155fd1a455c324b5890333e4d3d93371e0dc58b2f0b85a88ecf4 \ + --hash=sha256:0c6027c2e6916d730f82793ce558ccb925540868b89a4497e16e4ab7bf7d58a8 \ + --hash=sha256:11f2146884431cc3a8cb155470999358179dd6eeabdd8aa58e74cf33e65c2991 \ + --hash=sha256:1281e1211cfdbd65c54c2320ba8dc7c573c4f3921b39bceb804f30607e4a4fd2 \ + --hash=sha256:144cf46ba39574af3ab547beb8fbccb8bd518ffd6cac28c7c656f43875a9c3e2 \ + --hash=sha256:1f0d3292f201e674255b367aa92115fe804b9c93fa84fc043578eb939f0b3e34 \ + --hash=sha256:51aefb21c6d65bad35bf02109c6de30a816ebbd6bfedf4ec7ab546c149d09134 \ + --hash=sha256:557de526d4df837350572b1861da87b134b15719200edacb3a6003bf799f69ca \ + --hash=sha256:8cc5f8d24ac9e076809e460eca3c292a0f0338a8b4046b6adaf0b5673df9a738 \ + --hash=sha256:a37326c48998511c42d007541a01a38cb4e5f46d4569f2dd52614a7d3726fdcb \ + 
--hash=sha256:caf82157fc9be8bea9ddb4fb1ff2d0d6bc66cb850a0fc627a4acf097d1ad9b42 \ + --hash=sha256:d825a955fe2a464a0aa8071e7fe64376c21112fadf9db7266c4d7ec7ff3998fe \ + --hash=sha256:eec11f64946345407319bfc5f3703dfd93cf58565d968737e0857ae4a8ba7147 \ + --hash=sha256:efbc7708c818de8922f526727c804f85840a3a4ab0ca7f6a5ff42bab657691b7 \ + --hash=sha256:f1a3cd6deaf58d1df4adbe94fb4b106146985c50f26b5e9fda0a222632391017 \ + --hash=sha256:f8bfc0c4409a67e8bdd0f0036072132155ed37f8c2a3d7824803b57f3eb91630 +pillow==9.4.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:013016af6b3a12a2f40b704677f8b51f72cb007dac785a9933d5c86a72a7fe33 \ + --hash=sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b \ + --hash=sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e \ + --hash=sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35 \ + --hash=sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153 \ + --hash=sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9 \ + --hash=sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569 \ + --hash=sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57 \ + --hash=sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8 \ + --hash=sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1 \ + --hash=sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264 \ + --hash=sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157 \ + --hash=sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9 \ + --hash=sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133 \ + --hash=sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9 \ + --hash=sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab \ + --hash=sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6 \ + --hash=sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5 \ + --hash=sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df \ + --hash=sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503 \ + --hash=sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b \ + --hash=sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa \ + --hash=sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327 \ + --hash=sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493 \ + --hash=sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d \ + --hash=sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4 \ + --hash=sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4 \ + --hash=sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35 \ + --hash=sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2 \ + --hash=sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c \ + --hash=sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011 \ + --hash=sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a \ + --hash=sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e \ + --hash=sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f \ + --hash=sha256:83125753a60cfc8c412de5896d10a0a405e0bd88d0470ad82e0869ddf0cb3848 \ + --hash=sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57 \ + 
--hash=sha256:87708d78a14d56a990fbf4f9cb350b7d89ee8988705e58e39bdf4d82c149210f \ + --hash=sha256:8a2b5874d17e72dfb80d917213abd55d7e1ed2479f38f001f264f7ce7bae757c \ + --hash=sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9 \ + --hash=sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5 \ + --hash=sha256:99d92d148dd03fd19d16175b6d355cc1b01faf80dae93c6c3eb4163709edc0a9 \ + --hash=sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d \ + --hash=sha256:9d9a62576b68cd90f7075876f4e8444487db5eeea0e4df3ba298ee38a8d067b0 \ + --hash=sha256:9e5f94742033898bfe84c93c831a6f552bb629448d4072dd312306bab3bd96f1 \ + --hash=sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e \ + --hash=sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815 \ + --hash=sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0 \ + --hash=sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b \ + --hash=sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd \ + --hash=sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c \ + --hash=sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3 \ + --hash=sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab \ + --hash=sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858 \ + --hash=sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5 \ + --hash=sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee \ + --hash=sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343 \ + --hash=sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb \ + --hash=sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47 \ + --hash=sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed \ + --hash=sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837 \ + --hash=sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286 \ + --hash=sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28 \ + --hash=sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628 \ + --hash=sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df \ + --hash=sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d \ + --hash=sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d \ + --hash=sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a \ + --hash=sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6 \ + --hash=sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336 \ + --hash=sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132 \ + --hash=sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070 \ + --hash=sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe \ + --hash=sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a \ + --hash=sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd \ + --hash=sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391 \ + --hash=sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a \ + --hash=sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12 +playwright==1.28.0 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:074f73c17971f233903949492f31113bfbc2f1e2e85da7c1c03a15e5008b529f \ + --hash=sha256:265f47aaa42c7986316100f5f468f8654e9a1609c2a2578743e25d058bddc1e6 \ + --hash=sha256:2e101b17e4d5252ef96c9dc8b2ac17f2980dde0420728c1c96a77eeaf6f9b11f \ + --hash=sha256:794b9da616c03354a12e48ddf060a9e776ab59b90662b0131ff74ec1b25739f4 \ + --hash=sha256:8557d92718ce45814aff017fa1774ab92089e40b6c16a8073d5a7c4d583d4aed \ + --hash=sha256:96a2d63954098233bbfc48b874f2a8e7cf0c64d7fcae24469571b0fb90ebe00f \ + --hash=sha256:a21ddd7b6f6afd434a73471f7cd39673286f0ca88b62b756d90264eb7b5a7daf +pluggy==1.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 +prompt-toolkit==3.0.38 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b \ + --hash=sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f +psycopg2-binary==2.9.5 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:00475004e5ed3e3bf5e056d66e5dcdf41a0dc62efcd57997acd9135c40a08a50 \ + --hash=sha256:01ad49d68dd8c5362e4bfb4158f2896dc6e0c02e87b8a3770fc003459f1a4425 \ + --hash=sha256:024030b13bdcbd53d8a93891a2cf07719715724fc9fee40243f3bd78b4264b8f \ + --hash=sha256:02551647542f2bf89073d129c73c05a25c372fc0a49aa50e0de65c3c143d8bd0 \ + --hash=sha256:043a9fd45a03858ff72364b4b75090679bd875ee44df9c0613dc862ca6b98460 \ + --hash=sha256:05b3d479425e047c848b9782cd7aac9c6727ce23181eb9647baf64ffdfc3da41 \ + --hash=sha256:0775d6252ccb22b15da3b5d7adbbf8cfe284916b14b6dc0ff503a23edb01ee85 \ + --hash=sha256:1764546ffeaed4f9428707be61d68972eb5ede81239b46a45843e0071104d0dd \ + --hash=sha256:1e491e6489a6cb1d079df8eaa15957c277fdedb102b6a68cfbf40c4994412fd0 \ + --hash=sha256:212757ffcecb3e1a5338d4e6761bf9c04f750e7d027117e74aa3cd8a75bb6fbd \ + --hash=sha256:215d6bf7e66732a514f47614f828d8c0aaac9a648c46a831955cb103473c7147 \ + --hash=sha256:25382c7d174c679ce6927c16b6fbb68b10e56ee44b1acb40671e02d29f2fce7c \ + --hash=sha256:2abccab84d057723d2ca8f99ff7b619285d40da6814d50366f61f0fc385c3903 \ + --hash=sha256:2d964eb24c8b021623df1c93c626671420c6efadbdb8655cb2bd5e0c6fa422ba \ + --hash=sha256:2ec46ed947801652c9643e0b1dc334cfb2781232e375ba97312c2fc256597632 \ + --hash=sha256:2ef892cabdccefe577088a79580301f09f2a713eb239f4f9f62b2b29cafb0577 \ + --hash=sha256:33e632d0885b95a8b97165899006c40e9ecdc634a529dca7b991eb7de4ece41c \ + --hash=sha256:3520d7af1ebc838cc6084a3281145d5cd5bdd43fdef139e6db5af01b92596cb7 \ + --hash=sha256:3d790f84201c3698d1bfb404c917f36e40531577a6dda02e45ba29b64d539867 \ + --hash=sha256:3fc33295cfccad697a97a76dec3f1e94ad848b7b163c3228c1636977966b51e2 \ + --hash=sha256:422e3d43b47ac20141bc84b3d342eead8d8099a62881a501e97d15f6addabfe9 \ + --hash=sha256:426c2ae999135d64e6a18849a7d1ad0e1bd007277e4a8f4752eaa40a96b550ff \ + --hash=sha256:46512486be6fbceef51d7660dec017394ba3e170299d1dc30928cbedebbf103a \ + --hash=sha256:46850a640df62ae940e34a163f72e26aca1f88e2da79148e1862faaac985c302 \ + --hash=sha256:484405b883630f3e74ed32041a87456c5e0e63a8e3429aa93e8714c366d62bd1 \ + --hash=sha256:4e7904d1920c0c89105c0517dc7e3f5c20fb4e56ba9cdef13048db76947f1d79 \ + --hash=sha256:56b2957a145f816726b109ee3d4e6822c23f919a7d91af5a94593723ed667835 \ + --hash=sha256:5c6527c8efa5226a9e787507652dd5ba97b62d29b53c371a85cd13f957fe4d42 \ + --hash=sha256:5cbc554ba47ecca8cd3396ddaca85e1ecfe3e48dd57dc5e415e59551affe568e \ + 
--hash=sha256:5d28ecdf191db558d0c07d0f16524ee9d67896edf2b7990eea800abeb23ebd61 \ + --hash=sha256:5fc447058d083b8c6ac076fc26b446d44f0145308465d745fba93a28c14c9e32 \ + --hash=sha256:63e318dbe52709ed10d516a356f22a635e07a2e34c68145484ed96a19b0c4c68 \ + --hash=sha256:68d81a2fe184030aa0c5c11e518292e15d342a667184d91e30644c9d533e53e1 \ + --hash=sha256:6e63814ec71db9bdb42905c925639f319c80e7909fb76c3b84edc79dadef8d60 \ + --hash=sha256:6f8a9bcab7b6db2e3dbf65b214dfc795b4c6b3bb3af922901b6a67f7cb47d5f8 \ + --hash=sha256:70831e03bd53702c941da1a1ad36c17d825a24fbb26857b40913d58df82ec18b \ + --hash=sha256:74eddec4537ab1f701a1647214734bc52cee2794df748f6ae5908e00771f180a \ + --hash=sha256:7b3751857da3e224f5629400736a7b11e940b5da5f95fa631d86219a1beaafec \ + --hash=sha256:7cf1d44e710ca3a9ce952bda2855830fe9f9017ed6259e01fcd71ea6287565f5 \ + --hash=sha256:7d07f552d1e412f4b4e64ce386d4c777a41da3b33f7098b6219012ba534fb2c2 \ + --hash=sha256:7d88db096fa19d94f433420eaaf9f3c45382da2dd014b93e4bf3215639047c16 \ + --hash=sha256:7ee3095d02d6f38bd7d9a5358fcc9ea78fcdb7176921528dd709cc63f40184f5 \ + --hash=sha256:902844f9c4fb19b17dfa84d9e2ca053d4a4ba265723d62ea5c9c26b38e0aa1e6 \ + --hash=sha256:937880290775033a743f4836aa253087b85e62784b63fd099ee725d567a48aa1 \ + --hash=sha256:95076399ec3b27a8f7fa1cc9a83417b1c920d55cf7a97f718a94efbb96c7f503 \ + --hash=sha256:9c38d3869238e9d3409239bc05bc27d6b7c99c2a460ea337d2814b35fb4fea1b \ + --hash=sha256:9e32cedc389bcb76d9f24ea8a012b3cb8385ee362ea437e1d012ffaed106c17d \ + --hash=sha256:9ffdc51001136b699f9563b1c74cc1f8c07f66ef7219beb6417a4c8aaa896c28 \ + --hash=sha256:a0adef094c49f242122bb145c3c8af442070dc0e4312db17e49058c1702606d4 \ + --hash=sha256:a36a0e791805aa136e9cbd0ffa040d09adec8610453ee8a753f23481a0057af5 \ + --hash=sha256:a7e518a0911c50f60313cb9e74a169a65b5d293770db4770ebf004245f24b5c5 \ + --hash=sha256:af0516e1711995cb08dc19bbd05bec7dbdebf4185f68870595156718d237df3e \ + --hash=sha256:b8104f709590fff72af801e916817560dbe1698028cd0afe5a52d75ceb1fce5f \ + --hash=sha256:b911dfb727e247340d36ae20c4b9259e4a64013ab9888ccb3cbba69b77fd9636 \ + --hash=sha256:b9a794cef1d9c1772b94a72eec6da144c18e18041d294a9ab47669bc77a80c1d \ + --hash=sha256:b9c33d4aef08dfecbd1736ceab8b7b3c4358bf10a0121483e5cd60d3d308cc64 \ + --hash=sha256:b9d38a4656e4e715d637abdf7296e98d6267df0cc0a8e9a016f8ba07e4aa3eeb \ + --hash=sha256:bcda1c84a1c533c528356da5490d464a139b6e84eb77cc0b432e38c5c6dd7882 \ + --hash=sha256:bef7e3f9dc6f0c13afdd671008534be5744e0e682fb851584c8c3a025ec09720 \ + --hash=sha256:c15ba5982c177bc4b23a7940c7e4394197e2d6a424a2d282e7c236b66da6d896 \ + --hash=sha256:c5254cbd4f4855e11cebf678c1a848a3042d455a22a4ce61349c36aafd4c2267 \ + --hash=sha256:c5682a45df7d9642eff590abc73157c887a68f016df0a8ad722dcc0f888f56d7 \ + --hash=sha256:c5e65c6ac0ae4bf5bef1667029f81010b6017795dcb817ba5c7b8a8d61fab76f \ + --hash=sha256:d4c7b3a31502184e856df1f7bbb2c3735a05a8ce0ade34c5277e1577738a5c91 \ + --hash=sha256:d892bfa1d023c3781a3cab8dd5af76b626c483484d782e8bd047c180db590e4c \ + --hash=sha256:dbc332beaf8492b5731229a881807cd7b91b50dbbbaf7fe2faf46942eda64a24 \ + --hash=sha256:dc85b3777068ed30aff8242be2813038a929f2084f69e43ef869daddae50f6ee \ + --hash=sha256:e59137cdb970249ae60be2a49774c6dfb015bd0403f05af1fe61862e9626642d \ + --hash=sha256:e67b3c26e9b6d37b370c83aa790bbc121775c57bfb096c2e77eacca25fd0233b \ + --hash=sha256:e72c91bda9880f097c8aa3601a2c0de6c708763ba8128006151f496ca9065935 \ + --hash=sha256:f95b8aca2703d6a30249f83f4fe6a9abf2e627aa892a5caaab2267d56be7ab69 +pycparser==2.21 ; python_version >= "3.10" and python_version < "4.0" \ 
+ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycryptodomex==3.15.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:04a5d6a17560e987272fc1763e9772a87689a08427b8cbdebe3ca7cba95d6156 \ + --hash=sha256:04cc393045a8f19dd110c975e30f38ed7ab3faf21ede415ea67afebd95a22380 \ + --hash=sha256:0776bfaf2c48154ab54ea45392847c1283d2fcf64e232e85565f858baedfc1fa \ + --hash=sha256:0fadb9f7fa3150577800eef35f62a8a24b9ddf1563ff060d9bd3af22d3952c8c \ + --hash=sha256:18e2ab4813883ae63396c0ffe50b13554b32bb69ec56f0afaf052e7a7ae0d55b \ + --hash=sha256:191e73bc84a8064ad1874dba0ebadedd7cce4dedee998549518f2c74a003b2e1 \ + --hash=sha256:35a8f7afe1867118330e2e0e0bf759c409e28557fb1fc2fbb1c6c937297dbe9a \ + --hash=sha256:3709f13ca3852b0b07fc04a2c03b379189232b24007c466be0f605dd4723e9d4 \ + --hash=sha256:4540904c09704b6f831059c0dfb38584acb82cb97b0125cd52688c1f1e3fffa6 \ + --hash=sha256:463119d7d22d0fc04a0f9122e9d3e6121c6648bcb12a052b51bd1eed1b996aa2 \ + --hash=sha256:46b3f05f2f7ac7841053da4e0f69616929ca3c42f238c405f6c3df7759ad2780 \ + --hash=sha256:48697790203909fab02a33226fda546604f4e2653f9d47bc5d3eb40879fa7c64 \ + --hash=sha256:5676a132169a1c1a3712edf25250722ebc8c9102aa9abd814df063ca8362454f \ + --hash=sha256:65204412d0c6a8e3c41e21e93a5e6054a74fea501afa03046a388cf042e3377a \ + --hash=sha256:67e1e6a92151023ccdfcfbc0afb3314ad30080793b4c27956ea06ab1fb9bcd8a \ + --hash=sha256:6f5b6ba8aefd624834bc177a2ac292734996bb030f9d1b388e7504103b6fcddf \ + --hash=sha256:7341f1bb2dadb0d1a0047f34c3a58208a92423cdbd3244d998e4b28df5eac0ed \ + --hash=sha256:781efd04ea6762bb2ef7d4fa632c9c89895433744b6c345bd0c239d5ab058dfc \ + --hash=sha256:78d9621cf0ea35abf2d38fa2ca6d0634eab6c991a78373498ab149953787e5e5 \ + --hash=sha256:7db44039cc8b449bd08ab338a074e87093bd170f1a1b76d2fcef8a3e2ee11199 \ + --hash=sha256:8eecdf9cdc7343001d047f951b9cc805cd68cb6cd77b20ea46af5bffc5bd3dfb \ + --hash=sha256:94c7b60e1f52e1a87715571327baea0733708ab4723346598beca4a3b6879794 \ + --hash=sha256:996e1ba717077ce1e6d4849af7a1426f38b07b3d173b879e27d5e26d2e958beb \ + --hash=sha256:a07a64709e366c2041cd5cfbca592b43998bf4df88f7b0ca73dca37071ccf1bd \ + --hash=sha256:b6306403228edde6e289f626a3908a2f7f67c344e712cf7c0a508bab3ad9e381 \ + --hash=sha256:b9279adc16e4b0f590ceff581f53a80179b02cba9056010d733eb4196134a870 \ + --hash=sha256:c4cb9cb492ea7dcdf222a8d19a1d09002798ea516aeae8877245206d27326d86 \ + --hash=sha256:dd452a5af7014e866206d41751886c9b4bf379a339fdf2dbfc7dd16c0fb4f8e0 \ + --hash=sha256:e2b12968522a0358b8917fc7b28865acac002f02f4c4c6020fcb264d76bfd06d \ + --hash=sha256:e3164a18348bd53c69b4435ebfb4ac8a4076291ffa2a70b54f0c4b80c7834b1d \ + --hash=sha256:e47bf8776a7e15576887f04314f5228c6527b99946e6638cf2f16da56d260cab \ + --hash=sha256:f8be976cec59b11f011f790b88aca67b4ea2bd286578d0bd3e31bcd19afcd3e4 \ + --hash=sha256:fc9bc7a9b79fe5c750fc81a307052f8daabb709bdaabb0fb18fb136b66b653b5 +pydyf==0.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:116bc4b057822dc72d6afc826cad33444f7dcde8059aa85534380fb63e3e306f \ + --hash=sha256:51e751ae1504037c1fc1f4815119137b011802cd5f6c3539db066c455b14a7e1 +pyee==9.0.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2770c4928abc721f46b705e6a72b0c59480c4a69c9a83ca0b00bb994f1ea4b32 \ + --hash=sha256:9f066570130c554e9cc12de5a9d86f57c7ee47fece163bbdaa3e9c933cfbdfa5 +pyjwt==2.6.0 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyotp==2.8.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:889d037fdde6accad28531fc62a790f089e5dfd5b638773e9ee004cce074a2e5 \ + --hash=sha256:c2f5e17d9da92d8ec1f7de6331ab08116b9115adbabcba6e208d46fc49a98c5a +pyphen==0.14.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:414c9355958ca3c6a3ff233f65678c245b8ecb56418fb291e2b93499d61cd510 \ + --hash=sha256:596c8b3be1c1a70411ba5f6517d9ccfe3083c758ae2b94a45f2707346d8e66fa +pyrsistent==0.19.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8 \ + --hash=sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440 \ + --hash=sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a \ + --hash=sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c \ + --hash=sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3 \ + --hash=sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393 \ + --hash=sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9 \ + --hash=sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da \ + --hash=sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf \ + --hash=sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64 \ + --hash=sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a \ + --hash=sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3 \ + --hash=sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98 \ + --hash=sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2 \ + --hash=sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8 \ + --hash=sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf \ + --hash=sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc \ + --hash=sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7 \ + --hash=sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28 \ + --hash=sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2 \ + --hash=sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b \ + --hash=sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a \ + --hash=sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64 \ + --hash=sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19 \ + --hash=sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1 \ + --hash=sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9 \ + --hash=sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c +pytest-cov==4.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b \ + --hash=sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470 +pytest-django==4.5.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e \ + --hash=sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2 +pytest-xdist==3.0.2 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:688da9b814370e891ba5de650c9327d1a9d861721a524eb917e620eec3e90291 \ + --hash=sha256:9feb9a18e1790696ea23e1434fa73b325ed4998b0e9fcb221f16fd1945e6df1b +pytest==7.2.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e \ + --hash=sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4 +python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-decouple==3.6 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2838cdf77a5cf127d7e8b339ce14c25bceb3af3e674e039d4901ba16359968c7 \ + --hash=sha256:6cf502dc963a5c642ea5ead069847df3d916a6420cad5599185de6bab11d8c2e +python-gnupg==0.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:345723a03e67b82aba0ea8ae2328b2e4a3906fbe2c18c4082285c3b01068f270 \ + --hash=sha256:70758e387fc0e0c4badbcb394f61acbe68b34970a8fed7e0f7c89469fe17912a +pytz==2022.7.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0 \ + --hash=sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a +qrcode[pil]==7.3.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:375a6ff240ca9bd41adc070428b5dfc1dcfbb0f2507f1ac848f6cded38956578 +requests==2.28.2 ; python_version >= "3.10" and python_version < "4" \ + --hash=sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa \ + --hash=sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf +rfc3986[idna2008]==1.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835 \ + --hash=sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97 +s3transfer==0.6.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd \ + --hash=sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947 +setuptools==67.6.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077 \ + --hash=sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2 +six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +sniffio==1.3.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101 \ + --hash=sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384 +sqlparse==0.4.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0323c0ec29cd52bceabc1b4d9d579e311f3e4961b98d174201d5622a23b85e34 \ + --hash=sha256:69ca804846bb114d2ec380e4360a8a340db83f0ccf3afceeb1404df028f57268 +tinycss2==1.2.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2b80a96d41e7c3914b8cda8bc7f705a4d9c49275616e886103dd839dfc847847 \ + --hash=sha256:8cff3a8f066c2ec677c06dbc7b45619804a6938478d9d73c284b29d14ecb0627 +tomli==2.0.1 ; python_version >= "3.10" and python_full_version <= "3.11.0a6" \ + 
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +typing-extensions==4.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ + --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 +tzdata==2022.7 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" \ + --hash=sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d \ + --hash=sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa +urllib3==1.26.15 ; python_version >= "3.10" and python_version < "4" \ + --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ + --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 +uvicorn==0.20.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8 \ + --hash=sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd +vine==5.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30 \ + --hash=sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e +watchdog==2.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1893d425ef4fb4f129ee8ef72226836619c2950dd0559bba022b0818c63a7b60 \ + --hash=sha256:1a410dd4d0adcc86b4c71d1317ba2ea2c92babaf5b83321e4bde2514525544d5 \ + --hash=sha256:1f2b0665c57358ce9786f06f5475bc083fea9d81ecc0efa4733fd0c320940a37 \ + --hash=sha256:1f8eca9d294a4f194ce9df0d97d19b5598f310950d3ac3dd6e8d25ae456d4c8a \ + --hash=sha256:27e49268735b3c27310883012ab3bd86ea0a96dcab90fe3feb682472e30c90f3 \ + --hash=sha256:28704c71afdb79c3f215c90231e41c52b056ea880b6be6cee035c6149d658ed1 \ + --hash=sha256:2ac0bd7c206bb6df78ef9e8ad27cc1346f2b41b1fef610395607319cdab89bc1 \ + --hash=sha256:2af1a29fd14fc0a87fb6ed762d3e1ae5694dcde22372eebba50e9e5be47af03c \ + --hash=sha256:3a048865c828389cb06c0bebf8a883cec3ae58ad3e366bcc38c61d8455a3138f \ + --hash=sha256:441024df19253bb108d3a8a5de7a186003d68564084576fecf7333a441271ef7 \ + --hash=sha256:56fb3f40fc3deecf6e518303c7533f5e2a722e377b12507f6de891583f1b48aa \ + --hash=sha256:619d63fa5be69f89ff3a93e165e602c08ed8da402ca42b99cd59a8ec115673e1 \ + --hash=sha256:74535e955359d79d126885e642d3683616e6d9ab3aae0e7dcccd043bd5a3ff4f \ + --hash=sha256:76a2743402b794629a955d96ea2e240bd0e903aa26e02e93cd2d57b33900962b \ + --hash=sha256:83cf8bc60d9c613b66a4c018051873d6273d9e45d040eed06d6a96241bd8ec01 \ + --hash=sha256:920a4bda7daa47545c3201a3292e99300ba81ca26b7569575bd086c865889090 \ + --hash=sha256:9e99c1713e4436d2563f5828c8910e5ff25abd6ce999e75f15c15d81d41980b6 \ + --hash=sha256:a5bd9e8656d07cae89ac464ee4bcb6f1b9cecbedc3bf1334683bed3d5afd39ba \ + --hash=sha256:ad0150536469fa4b693531e497ffe220d5b6cd76ad2eda474a5e641ee204bbb6 \ + --hash=sha256:af4b5c7ba60206759a1d99811b5938ca666ea9562a1052b410637bb96ff97512 \ + --hash=sha256:c7bd98813d34bfa9b464cf8122e7d4bec0a5a427399094d2c17dd5f70d59bc61 \ + --hash=sha256:ceaa9268d81205876bedb1069f9feab3eccddd4b90d9a45d06a0df592a04cae9 \ + --hash=sha256:cf05e6ff677b9655c6e9511d02e9cc55e730c4e430b7a54af9c28912294605a4 \ + --hash=sha256:d0fb5f2b513556c2abb578c1066f5f467d729f2eb689bc2db0739daf81c6bb7e \ + --hash=sha256:d6ae890798a3560688b441ef086bb66e87af6b400a92749a18b856a134fc0318 \ + 
--hash=sha256:e5aed2a700a18c194c39c266900d41f3db0c1ebe6b8a0834b9995c835d2ca66e \ + --hash=sha256:e722755d995035dd32177a9c633d158f2ec604f2a358b545bba5bed53ab25bca \ + --hash=sha256:ed91c3ccfc23398e7aa9715abf679d5c163394b8cad994f34f156d57a7c163dc +wcwidth==0.2.6 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e \ + --hash=sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0 +weasyprint==57.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:685692e7a2b4aec21010a61c1e8f1d6cd611dd46509fa7068fcca3c8e59a94f5 \ + --hash=sha256:b8e9ef2dcbcfbc82e99215acfd68f947b2b5f1999b5b156db75fabe380ba7e9a +webencodings==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 +whitenoise==6.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:8e9c600a5c18bd17655ef668ad55b5edf6c24ce9bdca5bf607649ca4b1e8e2c2 \ + --hash=sha256:8fa943c6d4cd9e27673b70c21a07b0aa120873901e099cd46cab40f7cc96d567 +wrapt==1.15.0 ; python_version >= "3.10" and python_version < "4" \ + --hash=sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0 \ + --hash=sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420 \ + --hash=sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a \ + --hash=sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c \ + --hash=sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079 \ + --hash=sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923 \ + --hash=sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f \ + --hash=sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1 \ + --hash=sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8 \ + --hash=sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86 \ + --hash=sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0 \ + --hash=sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364 \ + --hash=sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e \ + --hash=sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c \ + --hash=sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e \ + --hash=sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c \ + --hash=sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727 \ + --hash=sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff \ + --hash=sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e \ + --hash=sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29 \ + --hash=sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7 \ + --hash=sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72 \ + --hash=sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475 \ + --hash=sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a \ + --hash=sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317 \ + --hash=sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2 \ + --hash=sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd \ + 
--hash=sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640 \ + --hash=sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98 \ + --hash=sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248 \ + --hash=sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e \ + --hash=sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d \ + --hash=sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec \ + --hash=sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1 \ + --hash=sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e \ + --hash=sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9 \ + --hash=sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92 \ + --hash=sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb \ + --hash=sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094 \ + --hash=sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46 \ + --hash=sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29 \ + --hash=sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd \ + --hash=sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705 \ + --hash=sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8 \ + --hash=sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975 \ + --hash=sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb \ + --hash=sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e \ + --hash=sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b \ + --hash=sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418 \ + --hash=sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019 \ + --hash=sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1 \ + --hash=sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba \ + --hash=sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6 \ + --hash=sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2 \ + --hash=sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3 \ + --hash=sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7 \ + --hash=sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752 \ + --hash=sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416 \ + --hash=sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f \ + --hash=sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1 \ + --hash=sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc \ + --hash=sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145 \ + --hash=sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee \ + --hash=sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a \ + --hash=sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7 \ + --hash=sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b \ + --hash=sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653 \ + --hash=sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0 \ + --hash=sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90 \ + 
--hash=sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29 \ + --hash=sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6 \ + --hash=sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034 \ + --hash=sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09 \ + --hash=sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559 \ + --hash=sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639 +zipstream-new==1.1.8 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0662eb3ebe764fa168a5883cd8819ef83b94bd9e39955537188459d2264a7f60 \ + --hash=sha256:b031fe181b94e51678389d26b174bc76382605a078d7d5d8f5beae083f111c76 +zopfli==0.2.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:00a66579f2e663cd7eabad71f5b114abf442f4816fdaf251b4b495aa9d016a67 \ + --hash=sha256:01e82e6e31cfcb2eb7e3d6d72d0a498d150e3c3112cae3b5ab88ca3efedbc162 \ + --hash=sha256:02a0c37826c0b28454865fdf664d54627fe8d90fac6f7325b5215719e8be09ca \ + --hash=sha256:093a58fdf1e592f01233fc16900ceb69f27f19b347deb49544df96d912664f6d \ + --hash=sha256:13487519e6ee8ed36c4a197d146d8ae60d418172d85342d3cdd28f38f905a705 \ + --hash=sha256:1360df0d423c897164a3344ed6635f7fd098cb4ce59c6d45b4275b93727d57f6 \ + --hash=sha256:17694cfda43fb2af18b571bfc60426fb67d7701d75cc1f0e634ad0a19ffaebdd \ + --hash=sha256:1e3aefca003cbb41a6dcdd61f920c807eea99d0196aff488f02275c3b3c400a9 \ + --hash=sha256:20b02b5c9f1cfbcfc154e54981d1b9f9581ca1f54ece39c6aed52f7166a6f081 \ + --hash=sha256:259f15d65e554b16a6086bfe96dd7bd175467eb3d024b9dbce41323b5861a285 \ + --hash=sha256:2b4b5ae717dc2c164d9fae6134eac285915aaef77723f8cf9765555ac926f6d0 \ + --hash=sha256:2bafc105065fae35bd96100a5901a7d816f1904eb732d94b6d46cf480ead581b \ + --hash=sha256:2ce7cbe8f6fff013aa695d5d92ac2b1fd46fd012858109fdde9824759b566685 \ + --hash=sha256:2d49db7540d9991976af464ebc1b9ed12988c04d90691bcb51dc4a373a9e2afc \ + --hash=sha256:2d5e81fed8ac2d71832177ab06385f032cc3a37eec76537d105b1018b7fef0ff \ + --hash=sha256:2e5b7874dfe228715569940561cdc0485ed8cbfd2c76eebc4e54719e0c9cc494 \ + --hash=sha256:2ea855a740ee766c872cbf84abdcc1b6a51b5dbdeb6ace995f36c934b3846467 \ + --hash=sha256:33c876d311c5edc700ccf75a22d03dcda1efa85b43f733913a99b5f3d1eb4ea7 \ + --hash=sha256:3b58455a9d23f6d45f2686891d7bec916132aed335052459bbed36a2b9437c1d \ + --hash=sha256:3df7ae869dcb8e0bb3292e6ab041d16323af37d87c8dca1dde7b2fe5cb6b7cf7 \ + --hash=sha256:4205bb3aea31f22cd52bd1a9c298944591bfd9b6f92ede0af99127750b27eb3b \ + --hash=sha256:468c4317aca9411b576a27f6f26217bdd30e04fdfc420d3d7e8b6f1fef4e9886 \ + --hash=sha256:47d9ec1ca32240fae8b9b41e90d6483f4d0f2946de4785f54f4f57afe83040be \ + --hash=sha256:4b471e3f58bd7b77cfc7a29b28a10c094ea4cd9ee14c54fbc4f1150680aac68c \ + --hash=sha256:4cbc6192bf24425c757281c7c864012e51d29095771f805ea3040702c10c3d7a \ + --hash=sha256:4fcc34fd420ec5750f9981db43ee9a4f2e2bfabdc52128b243fca1fd9b99e13d \ + --hash=sha256:537da300842f06470c036d6d7e7fc9e63713735ee0b96ee97a750d1ec0399639 \ + --hash=sha256:573ae7e1cb4f0c9a248c203440950b24b213c13b5169e169a884c777ad9054e4 \ + --hash=sha256:58ddab571a77988bc585e1a6fa46f9848b45880fa74bc832b135cbc22d22a619 \ + --hash=sha256:5aea70d124ff9c0a33078f1451dfa2dd29eba53ea0627acb88783a19f0692044 \ + --hash=sha256:691d4e0fd04e85ee5f59e019ed0da16d8f544904d3879a34986722d87a90c536 \ + --hash=sha256:69411d85ed25ea25f480410048b397abc4c98562ce3533ecc3ce65358acc52dd \ + --hash=sha256:7146c58c5ff604e7798d4c015c0ca8da53128ca29d0f1bccb48c785953451cd4 \ + 
--hash=sha256:71eafbe6ce975f77a5247bf44fdfdb78e846a76a3391de4d75cc68ea74542048 \ + --hash=sha256:81c2c1216814a4f2f9abcd49fd4b70f05266d3621ef3b21e4b1b7bf535876fc1 \ + --hash=sha256:8563e639534201a14c109c54965f8a71574d8cf525a0a521d310e044d81fece9 \ + --hash=sha256:898daa330577101aab03806231e9b29990ebaa34f275d9df2045d0551edd1e87 \ + --hash=sha256:8c1b316a5eed59a9a49a886aeeaf3b7233627a1013b10f230817870278e15789 \ + --hash=sha256:8c2e6d0618e1ffc27a1eaf66662f96e0bc8a4c1926fc139a0f544b93a1e1b451 \ + --hash=sha256:8d6d02e1a962995c380411cc4ec81d1f4fc60c293764f8acd859eb12bfdf7190 \ + --hash=sha256:8e05a2506e8a8d44835a11d5f1c296035d65d0f7053f77730ce99066acaf09af \ + --hash=sha256:9de02f057ed153c9f523e72a366b8f48e2634c9f867e7109232415efe11d36c2 \ + --hash=sha256:a568f09aa932a04073a4147e2db5db2adfccd864326477d58d4ffc80550531c7 \ + --hash=sha256:a712fdc3dab61037fab549ff72539b7968ffda567e5460aa2518e40a13b4dd38 \ + --hash=sha256:a85d500cfa06f127e441e90804556a3872ea329e065d2f0ee97922d03afc9885 \ + --hash=sha256:a989893b20381be266a2385f4a1b77316e0df4258ee048bb190c2e426e39cbc8 \ + --hash=sha256:b5b2e2ac397a71772fbbdc5b31fa8257e46f2a1e718e5c17c08db3dac7c739e4 \ + --hash=sha256:bd661f0894a4e4d78ce4c07e2625b0fd17ae172040ce57c5e1c32316a16727c9 \ + --hash=sha256:bd7b174fef2366723f57d16f3e8d157f9cbb53b1c555e2a1f99b6290de94ca28 \ + --hash=sha256:c27af5f9a6538891af7257e104a37affbe26383fc0bd57b52c05fe2f45292dc9 \ + --hash=sha256:c49e29739508a7142fa1437256a7bf631926e70e68ca50a6bd62ee4e80050acc \ + --hash=sha256:c9d444b26317f3c40909d555f9c611ef8bcac6edf016af7709a32ad5848b481d \ + --hash=sha256:da3d682956e447f61ad23f66f49f20f189d12b15857a2e524497793ae54027c4 \ + --hash=sha256:db004eb8ee7aab9c86647b92e1e570edb6fec9bd384a7a4f24e1f6529db34ac3 \ + --hash=sha256:e0014bd1b9703c9cdfa7f88bc793600aee5f858dd2f18105b49a70e66b9f1b1d \ + --hash=sha256:e1689ced6f6ebf674281d85c143529232aa039c4e8d814bf3b425f1793bfdeb4 \ + --hash=sha256:e340851bbdea91408e6713748b4082c2e464a80eef9f9a69ff5a20e5e008cace \ + --hash=sha256:ec845584fcdc10763d869b40b742fe0e2684adf3ca275ec997b9447ef5fe3ad9 \ + --hash=sha256:ed09efbcdc8bce5b5ff052ffd1edabdabd7a43e340ee63f8d5e81644dc50110f \ + --hash=sha256:ed2137d64470469c825713aac486aacc9e2c46e300b92cb39ae47f4024b86b2e \ + --hash=sha256:f6f62330a3999522282d0cc6370682d86985ac66edc2799f5934e309d8d615f1 \ + --hash=sha256:fa589e4d2b54d95447cb79a6053050fc7218f61594085ca54672cb045ba0f7f8 diff --git a/api/src/.coveragerc b/api/src/.coveragerc new file mode 100644 index 0000000..af30fd5 --- /dev/null +++ b/api/src/.coveragerc @@ -0,0 +1,4 @@ +[run] +relative_files = True +source = ./ +omit = reportcreator_api/tests/* diff --git a/api/src/frontend/index.html b/api/src/frontend/index.html new file mode 100644 index 0000000..6a2db83 --- /dev/null +++ b/api/src/frontend/index.html @@ -0,0 +1,6 @@ + + + +

+Frontend not built
+
+In development mode, the frontend is accessible via http://localhost:3000/
+ \ No newline at end of file diff --git a/api/src/frontend/static/.gitkeep b/api/src/frontend/static/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/api/src/manage.py b/api/src/manage.py new file mode 100644 index 0000000..d9f588b --- /dev/null +++ b/api/src/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/api/src/pytest.ini b/api/src/pytest.ini new file mode 100644 index 0000000..022bb14 --- /dev/null +++ b/api/src/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +DJANGO_SETTINGS_MODULE = reportcreator_api.conf.settings_test +# warnings from libraries +filterwarnings = + ignore:remove loop argument:DeprecationWarning \ No newline at end of file diff --git a/api/src/reportcreator_api/__init__.py b/api/src/reportcreator_api/__init__.py new file mode 100644 index 0000000..2604ad8 --- /dev/null +++ b/api/src/reportcreator_api/__init__.py @@ -0,0 +1,3 @@ +from reportcreator_api.conf.celery import celery_app + +__all__ = ('celery_app',) \ No newline at end of file diff --git a/api/src/reportcreator_api/api_utils/__init__.py b/api/src/reportcreator_api/api_utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/api_utils/apps.py b/api/src/reportcreator_api/api_utils/apps.py new file mode 100644 index 0000000..86ce971 --- /dev/null +++ b/api/src/reportcreator_api/api_utils/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class ApiUtilsConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'reportcreator_api.api_utils' + diff --git a/api/src/reportcreator_api/api_utils/backup_utils.py b/api/src/reportcreator_api/api_utils/backup_utils.py new file mode 100644 index 0000000..ae72ebc --- /dev/null +++ b/api/src/reportcreator_api/api_utils/backup_utils.py @@ -0,0 +1,97 @@ +import logging +import zipstream +import boto3 +import io +import json +import itertools +from pathlib import Path +from django.apps import apps +from django.core import serializers +from django.core.serializers.json import DjangoJSONEncoder + +from reportcreator_api.archive import crypto +from reportcreator_api.pentests.models import UploadedImage, UploadedAsset, UploadedProjectFile, UploadedUserNotebookImage, ArchivedProject + + +def create_database_dump(): + """ + Return a database dump of django models. It uses the same format as "manage.py dumpdata --format=jsonl". 
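+    Sensitive and derived models (sessions, content types, permissions, lock info) are excluded, and objects are serialized with natural keys where possible.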
+ """ + exclude_models = ['contenttypes.ContentType', 'sessions.Session', 'users.Session', 'admin.LogEntry', 'auth.Permission', 'auth.Group', 'pentests.LockInfo'] + try: + app_list = [app_config for app_config in apps.get_app_configs() if app_config.models_module is not None] + models = list(itertools.chain(*map(lambda a: a.get_models(), app_list))) + for model in models: + natural_key = True + if model._meta.label == 'users.PentestUser': + natural_key = False + if model._meta.label not in exclude_models: + for e in model._default_manager.order_by(model._meta.pk.name).iterator(): + yield json.dumps( + serializers.serialize( + 'python', + [e], + use_natural_foreign_keys=natural_key, + use_natural_primary_keys=natural_key + )[0], cls=DjangoJSONEncoder, ensure_ascii=True).encode() + b'\n' + except Exception as ex: + logging.exception('Error creating database dump') + raise ex + + +def backup_files(z, model, path): + for f in model.objects.values_list('file', flat=True).distinct().iterator(): + try: + z.write_iter(str(Path(path) / f), model.file.field.storage.open(f).chunks()) + except (FileNotFoundError, OSError) as ex: + logging.warning(f'Could not backup file {f}: {ex}') + + +def create_backup(): + logging.info('Backup requested') + z = zipstream.ZipFile(mode='w', compression=zipstream.ZIP_DEFLATED) + z.write_iter('backup.jsonl', create_database_dump()) + + backup_files(z, UploadedImage, 'uploadedimages') + backup_files(z, UploadedUserNotebookImage, 'uploadedimages') + backup_files(z, UploadedAsset, 'uploadedassets') + backup_files(z, UploadedProjectFile, 'uploadedfiles') + backup_files(z, ArchivedProject, 'archivedfiles') + + return z + + +def encrypt_backup(z, aes_key): + buf = io.BytesIO() + with crypto.open(fileobj=buf, mode='wb', key_id=None, key=crypto.EncryptionKey(id=None, key=aes_key)) as c: + for chunk in z: + c.write(chunk) + yield buf.getvalue() + buf.seek(0) + buf.truncate() + if remaining := buf.getvalue(): + yield remaining + + +def upload_to_s3_bucket(z, s3_params): + s3 = boto3.resource('s3', **s3_params.get('boto3_params', {})) + bucket = s3.Bucket(s3_params['bucket_name']) + + class Wrapper: + def __init__(self, z): + self.z = iter(z) + self.buffer = b'' + + def read(self, size=8192): + while len(self.buffer) < size: + try: + self.buffer += next(self.z) + except StopIteration: + break + ret = self.buffer[:size] + + self.buffer = self.buffer[size:] + return ret + + bucket.upload_fileobj(Wrapper(z), s3_params['key']) + diff --git a/api/src/reportcreator_api/api_utils/healthchecks.py b/api/src/reportcreator_api/api_utils/healthchecks.py new file mode 100644 index 0000000..693ebe6 --- /dev/null +++ b/api/src/reportcreator_api/api_utils/healthchecks.py @@ -0,0 +1,56 @@ +import functools +import operator +import uuid +from django.utils.module_loading import import_string +from django.core.cache import cache +from django.db import connection +from django.db.migrations.executor import MigrationExecutor +from rest_framework.response import Response + + +def run_healthchecks(checks: dict[str, str]): + res = {} + for service, check_func_name in checks.items(): + check_func = import_string(check_func_name) + res[service] = check_func() + + has_errors = not all(res.values()) + return Response(data=res, status=503 if has_errors else 200) + + +def check_database(): + """ + Check if the application can perform a dummy sql query + """ + with connection.cursor() as cursor: + cursor.execute("SELECT 1; -- Healthcheck") + row = cursor.fetchone() + return row and row[0] == 1 + + +def 
check_cache(): + """ + Check if the application can connect to the default cached and read/write some dummy data. + """ + dummy = str(uuid.uuid4()) + key = "healthcheck:%s" % dummy + cache.set(key, dummy, timeout=5) + cached_value = cache.get(key) + cache.delete(key) + return cached_value == dummy + + +def check_migrations(): + """ + Check if django has unapplied migrations. + """ + cache_key = __name__ + '.migration_check_cache' + if res := cache.get(cache_key): + return res + + executor = MigrationExecutor(connection) + res = not executor.migration_plan(executor.loader.graph.leaf_nodes()) + if res: + cache.set(key=cache_key, value=res, timeout=10 * 60) + return res + diff --git a/api/src/reportcreator_api/api_utils/migrations/0001_languagetool.py b/api/src/reportcreator_api/api_utils/migrations/0001_languagetool.py new file mode 100644 index 0000000..0ca4160 --- /dev/null +++ b/api/src/reportcreator_api/api_utils/migrations/0001_languagetool.py @@ -0,0 +1,38 @@ +from django.db import migrations, models +from django.conf import settings + + +class Migration(migrations.Migration): + """ + Create database models used by LanguageTool. + LanguageTool does not come with a way to run migrations, therefore we manage them with the Django ORM. + Resue Django user accounts for LanguageTool users to authenticate (via a DB view). + """ + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.RunSQL( + sql="CREATE VIEW users AS " + + "SELECT ('x' || translate(u.id::text, '-', ''))::bit(63)::bigint AS id, u.id::text AS email, u.id::text AS api_key FROM users_pentestuser u " + + "UNION SELECT 1 AS id, 'languagetool' AS email, 'languagetool' AS api_key;", + reverse_sql="DROP VIEW users;" + ), + migrations.CreateModel( + name='LanguageToolIgnoreWords', + fields=[ + ('id', models.BigAutoField(primary_key=True, serialize=False)), + ('user_id', models.BigIntegerField(db_index=True)), + ('ignore_word', models.CharField(max_length=255)), + ('created_at', models.DateTimeField(auto_created=True)), + ('updated_at', models.DateTimeField(auto_now=True)), + ], + options={ + 'db_table': 'ignore_words', + } + ) + ] diff --git a/api/src/reportcreator_api/api_utils/migrations/__init__.py b/api/src/reportcreator_api/api_utils/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/api_utils/models.py b/api/src/reportcreator_api/api_utils/models.py new file mode 100644 index 0000000..4671082 --- /dev/null +++ b/api/src/reportcreator_api/api_utils/models.py @@ -0,0 +1,13 @@ +from django.db import models + + +class LanguageToolIgnoreWords(models.Model): + id = models.BigAutoField(primary_key=True) + created_at = models.DateTimeField(auto_created=True) + updated_at = models.DateTimeField(auto_now=True) + user_id = models.BigIntegerField(db_index=True) + ignore_word = models.CharField(max_length=255) + + class Meta: + db_table = 'ignore_words' + diff --git a/api/src/reportcreator_api/api_utils/permissions.py b/api/src/reportcreator_api/api_utils/permissions.py new file mode 100644 index 0000000..418e994 --- /dev/null +++ b/api/src/reportcreator_api/api_utils/permissions.py @@ -0,0 +1,11 @@ +from rest_framework.permissions import IsAuthenticated + +class IsSystemUser(IsAuthenticated): + def has_permission(self, request, view): + return super().has_permission(request, view) and request.user.is_system_user + + +class IsUserManagerOrSuperuser(IsAuthenticated): + def has_permission(self, request, view): 
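+        # allow only authenticated users flagged as user manager or superuser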
+ return super().has_permission(request, view) and (request.user.is_user_manager or request.user.is_superuser) + diff --git a/api/src/reportcreator_api/api_utils/serializers.py b/api/src/reportcreator_api/api_utils/serializers.py new file mode 100644 index 0000000..185cb7d --- /dev/null +++ b/api/src/reportcreator_api/api_utils/serializers.py @@ -0,0 +1,114 @@ +import json +import logging +import httpx +from base64 import b64decode +from urllib.parse import urljoin +from django.conf import settings +from rest_framework import serializers, exceptions + +from reportcreator_api.pentests.models import Language + + +log = logging.getLogger(__name__) + + +class TextAnnotationField(serializers.Serializer): + text = serializers.CharField(required=False, allow_blank=True, trim_whitespace=False) + markup = serializers.CharField(required=False, allow_blank=True, trim_whitespace=False) + interpretAs = serializers.CharField(required=False, allow_blank=True, trim_whitespace=False) + offset = serializers.IntegerField(min_value=0, required=False) + + def validate(self, attrs): + if attrs.get('text') is None and attrs.get('markup') is None: + raise serializers.ValidationError('Either text or markup is required') + return attrs + + +class TextDataField(serializers.Serializer): + annotation = TextAnnotationField(many=True) + + +class LanguageToolSerializerBase(serializers.Serializer): + def languagetool_auth(self): + return { + 'username': str(self.context['request'].user.id), + 'apiKey': str(self.context['request'].user.id), + } if settings.SPELLCHECK_DICTIONARY_PER_USER else { + 'username': 'languagetool', + 'apiKey': 'languagetool', + } + + async def languagetool_request(self, path, data): + if not settings.SPELLCHECK_URL: + raise exceptions.PermissionDenied('Spell checker not configured') + + async with httpx.AsyncClient(timeout=10) as client: + res = await client.post( + url=urljoin(settings.SPELLCHECK_URL, path), + data=self.languagetool_auth() | data + ) + return res.json() + + +class LanguageToolSerializer(LanguageToolSerializerBase): + language = serializers.ChoiceField(choices=Language.choices + [('auto', 'auto')]) + data = TextDataField() + + async def spellcheck(self): + data = self.validated_data + return await self.languagetool_request('/v2/check', { + 'language': data['language'], + 'data': json.dumps(data['data'], ensure_ascii=False), + **({ + 'preferredVariants': Language.values + } if data['language'] == 'auto' else {}), + }) + + +def validate_singe_word(val): + if ' ' in val: + raise serializers.ValidationError('Only a single word is supported') + + +class LanguageToolAddWordSerializer(LanguageToolSerializerBase): + word = serializers.CharField(max_length=255, validators=[validate_singe_word]) + + async def save(self): + return await self.languagetool_request('/v2/words/add', { + 'word': self.validated_data['word'], + }) + + +class S3ParamsSerializer(serializers.Serializer): + bucket_name = serializers.CharField() + key = serializers.CharField() + boto3_params = serializers.JSONField(required=False) + + +class BackupSerializer(serializers.Serializer): + key = serializers.CharField() + aes_key = serializers.CharField(required=False, allow_null=True) + s3_params = S3ParamsSerializer(required=False, allow_null=True) + + def validate_key(self, key): + if not settings.BACKUP_KEY or len(settings.BACKUP_KEY) < 20: + log.error('Backup key not set or too short (min 20 chars)') + raise serializers.ValidationError() + if key != settings.BACKUP_KEY: + log.error('Invalid backup key') + raise 
serializers.ValidationError() + return key + + def validate_aes_key(self, value): + if not value: + return None + + try: + key_bytes = b64decode(value) + if len(key_bytes) != 32: + raise serializers.ValidationError('Invalid key length: must be a 256-bit AES key') + return value + except ValueError: + raise serializers.ValidationError('Invalid base64 encoding') + + diff --git a/api/src/reportcreator_api/api_utils/views.py b/api/src/reportcreator_api/api_utils/views.py new file mode 100644 index 0000000..f5e8527 --- /dev/null +++ b/api/src/reportcreator_api/api_utils/views.py @@ -0,0 +1,130 @@ +import logging +from asgiref.sync import sync_to_async +from base64 import b64decode +from django.http import StreamingHttpResponse +from django.conf import settings +from django.utils import timezone +from rest_framework import viewsets, routers +from rest_framework.serializers import Serializer +from rest_framework.response import Response +from rest_framework.decorators import action +from rest_framework.settings import api_settings + +from reportcreator_api.api_utils.serializers import LanguageToolAddWordSerializer, LanguageToolSerializer, BackupSerializer +from reportcreator_api.api_utils.healthchecks import run_healthchecks +from reportcreator_api.api_utils.permissions import IsSystemUser, IsUserManagerOrSuperuser +from reportcreator_api.api_utils import backup_utils +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.api import GenericAPIViewAsync +from reportcreator_api.utils import license +from reportcreator_api.pentests.models import Language +from reportcreator_api.pentests.models import ProjectMemberRole +from reportcreator_api.tasks.models import PeriodicTask +from reportcreator_api.utils.utils import copy_keys + + +log = logging.getLogger(__name__) + + +class UtilsViewSet(viewsets.ViewSet): + def get_serializer_class(self): + if self.action == 'backup': + return BackupSerializer + elif self.action == 'spellcheck': + return LanguageToolSerializer + else: + return Serializer + + def get_serializer(self, *args, **kwargs): + return self.get_serializer_class()(*args, **kwargs) + + def list(self, *args, **kwargs): + return routers.APIRootView(api_root_dict={ + 'settings': 'utils-settings', + 'license': 'utils-license', + 'spellcheck': 'utils-spellcheck', + 'backup': 'utils-backup', + 'healthcheck': 'utils-healthcheck', + }).get(*args, **kwargs) + + @action(detail=False, url_name='settings', url_path='settings', authentication_classes=[], permission_classes=[]) + def settings_endpoint(self, *args, **kwargs): + return Response({ + 'languages': [{'code': l[0], 'name': l[1]} for l in Language.choices], + 'project_member_roles': [{'role': r.role, 'default': r.default} for r in ProjectMemberRole.predefined_roles], + 'auth_providers': [{'id': k, 'name': v.get('label', k)} for k, v in settings.AUTHLIB_OAUTH_CLIENTS.items()] if license.is_professional() else [], + 'elastic_apm_rum_config': settings.ELASTIC_APM_RUM_CONFIG if settings.ELASTIC_APM_RUM_ENABLED else None, + 'archiving_threshold': settings.ARCHIVING_THRESHOLD, + 'license': copy_keys(license.check_license(), ['type', 'error']), + 'features': { + 'private_designs': settings.ENABLE_PRIVATE_DESIGNS, + 'spellcheck': bool(settings.SPELLCHECK_URL and license.is_professional()), + 'archiving': license.is_professional(), + }, + }) + + @action(detail=False, methods=['post'], permission_classes=api_settings.DEFAULT_PERMISSION_CLASSES + [IsSystemUser, license.ProfessionalLicenseRequired]) + def backup(self, request, 
*args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + data = serializer.data + aes_key = data.get('aes_key') + if aes_key: + aes_key = b64decode(aes_key) + + z = backup_utils.create_backup() + if aes_key: + z = backup_utils.encrypt_backup(z, aes_key=aes_key) + + if s3_params := data.get('s3_params'): + backup_utils.upload_to_s3_bucket(z, s3_params) + return Response(status=200) + else: + response = StreamingHttpResponse(z) + filename = f'backup-{timezone.now().isoformat()}.zip' + if aes_key: + filename += '.crypt' + else: + response['Content-Type'] = 'application/zip' + + response['Content-Disposition'] = f"attachment; filename={filename}" + log.info('Sending Backup') + return response + + @action(detail=False, methods=['get'], permission_classes=api_settings.DEFAULT_PERMISSION_CLASSES + [IsUserManagerOrSuperuser]) + def license(self, request, *args, **kwargs): + return Response(data=license.check_license() | { + 'active_users': PentestUser.objects.get_licensed_user_count(), + }) + + +class SpellcheckView(GenericAPIViewAsync): + serializer_class = LanguageToolSerializer + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [license.ProfessionalLicenseRequired] + + async def post(self, request, *args, **kwargs): + serializer = await self.aget_valid_serializer(data=request.data) + data = await serializer.spellcheck() + return Response(data=data) + + +class SpellcheckWordView(GenericAPIViewAsync): + serializer_class = LanguageToolAddWordSerializer + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [license.ProfessionalLicenseRequired] + + async def post(self, request, *args, **kwargs): + serializer = await self.aget_valid_serializer(data=request.data) + data = await serializer.save() + return Response(data=data) + + +class HealthcheckView(GenericAPIViewAsync): + authentication_classes = [] + permission_classes = [] + + async def get(self, *args, **kwargs): + # Trigger periodic tasks + await PeriodicTask.objects.run_all_pending_tasks() + + return await sync_to_async(run_healthchecks)(settings.HEALTH_CHECKS) + diff --git a/api/src/reportcreator_api/archive/__init__.py b/api/src/reportcreator_api/archive/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/archive/crypto/__init__.py b/api/src/reportcreator_api/archive/crypto/__init__.py new file mode 100644 index 0000000..4345a7c --- /dev/null +++ b/api/src/reportcreator_api/archive/crypto/__init__.py @@ -0,0 +1,4 @@ +from .base import * +from .fields import * +from .storage import * +from .secret_sharing import * diff --git a/api/src/reportcreator_api/archive/crypto/base.py b/api/src/reportcreator_api/archive/crypto/base.py new file mode 100644 index 0000000..c854be0 --- /dev/null +++ b/api/src/reportcreator_api/archive/crypto/base.py @@ -0,0 +1,362 @@ +import base64 +import dataclasses +import enum +import io +import json +from typing import Optional +from Cryptodome.Cipher import AES +from Cryptodome.Cipher._mode_gcm import _GHASH, _ghash_clmul, _ghash_portable +from Cryptodome.Util.number import long_to_bytes, bytes_to_long + +from django.conf import settings +from django.core.files.utils import FileProxyMixin + + +# Magic bytes to identify encrypted data +# Invalid UTF-8, such that an error occurs when someone tries to load encrypted data as text +MAGIC = b'\xC2YPT' + + +class CryptoError(Exception): + pass + + +class EncryptionCipher(enum.Enum): + AES_GCM = 'AES-GCM' + + +@dataclasses.dataclass +class 
EncryptionKey: + id: str + key: bytes + cipher: EncryptionCipher = EncryptionCipher.AES_GCM + revoked: bool = False + + @classmethod + def from_json_list(cls, data: str) -> dict: + if not data: + return {} + return dict(map(lambda e: (e['id'], cls(**(e | { + 'cipher': EncryptionCipher(e['cipher']), + 'key': base64.b64decode(e['key']), + }))), json.loads(data))) + + +class ReadIntoAdapter(FileProxyMixin): + def __init__(self, file) -> None: + self.file = file + + def readinto(self, b): + r = self.file.read(len(b)) + b[0:len(r)] = r + return len(r) + + +def open(fileobj, mode='r', **kwargs): + plaintext_fallback = kwargs.get('plaintext_fallback', settings.ENCRYPTION_PLAINTEXT_FALLBACK) + + if mode in ['r', 'rb']: + key = kwargs.pop('key', None) + keys = kwargs.pop('keys', settings.ENCRYPTION_KEYS) + + if not hasattr(fileobj, 'readinto'): + fileobj = ReadIntoAdapter(fileobj) + if not hasattr(fileobj, 'peek'): + fileobj = io.BufferedReader(fileobj) + if fileobj.peek(len(MAGIC)).startswith(MAGIC): + return DecryptionStream(fileobj=fileobj, key=key, keys=keys, **kwargs) + elif plaintext_fallback: + return BufferedReaderNonClosing(fileobj) + else: + raise CryptoError('Data is not encrypted and plaintext fallback is disabled') + elif mode in ['w', 'wb']: + key_id = kwargs.pop('key_id', settings.DEFAULT_ENCRYPTION_KEY_ID) + key = kwargs.pop('key', settings.ENCRYPTION_KEYS.get(key_id)) + + if key: + return EncryptionStream(fileobj, key=key, **kwargs) + elif plaintext_fallback: + return BufferedWriterNonClosing(fileobj) + else: + raise CryptoError('No key provided and plaintext fallback is disabled') + + +def readexact(fileobj, size): + out = b'' + while len(out) < size: + chunk = fileobj.read(size - len(out)) + if not chunk: + raise CryptoError('Data missing on stream. Cannot read desired amount of data.') + out += chunk + return out + + +def readall(fileobj): + out = b'' + while chunk := fileobj.read(): + out += chunk + return out + + +class NonClosingBufferedIOMixin: + """ + BufferedReader that does not close the underlying raw stream when the reader gets closed. + """ + def close(self): + if self.raw is not None and not self.closed: + # may raise BlockingIOError or BrokenPipeError etc + self.flush() + +class BufferedReaderNonClosing(NonClosingBufferedIOMixin, io.BufferedReader): + pass + + +class BufferedWriterNonClosing(NonClosingBufferedIOMixin, io.BufferedWriter): + pass + + +class EncryptionStream(io.RawIOBase): + def __init__(self, fileobj, key: EncryptionKey, nonce=None) -> None: + self.fileobj = fileobj + self.header_written = False + self.key = key + self.cipher = self._init_cipher(nonce=nonce) + + def readable(self) -> bool: + return False + + def writable(self) -> bool: + return True + + def seekable(self) -> bool: + return False + + def _init_cipher(self, nonce=None): + if self.key.revoked: + raise CryptoError('Key is revoked. It cannot be used for encryption anymore.') + if self.key.cipher == EncryptionCipher.AES_GCM: + return AES.new(key=self.key.key, mode=AES.MODE_GCM, nonce=nonce) + else: + raise CryptoError('Unknown cipher') + + def _ensure_header(self): + if self.header_written: + return + + # Write header at start of file before any data + header = MAGIC + json.dumps({ + 'cipher': self.key.cipher.value, + 'nonce': base64.b64encode(self.cipher.nonce).decode(), + 'key_id': self.key.id, + }).encode() + b'\x00' + self.fileobj.write(header) + + # Add header to authentication data. Modifications in header will be detected by authentication tag. 
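+        # update() feeds the header to AES-GCM as additional authenticated data (AAD): it is stored in plaintext, but it is covered by the authentication tag that is verified on decryption.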
+        self.cipher.update(header)
+        self.header_written = True
+
+    def write(self, data: bytes):
+        if self.closed:
+            raise ValueError('write() on closed stream')
+
+        # Encrypt data
+        self._ensure_header()
+        self.fileobj.write(self.cipher.encrypt(data))
+
+    def flush(self) -> None:
+        return self.fileobj.flush()
+
+    def close(self):
+        if self.closed:
+            return
+        try:
+            # Write authentication tag at end
+            self._ensure_header()
+            tag = self.cipher.digest()
+            self.fileobj.write(tag)
+        finally:
+            super().close()
+
+
+class DecryptionStream(io.RawIOBase):
+    def __init__(self, fileobj, key: Optional[EncryptionKey] = None, keys: Optional[dict[str, EncryptionKey]] = None) -> None:
+        self.fileobj = fileobj
+        self.metadata = None
+        self.cipher = None
+        self.header_len = 0
+        self.auth_tag_len = 16
+        self.auth_tag_buffer = bytearray()
+        self.auth_tag_verified = False
+
+        self._load_header(key=key, keys=keys)
+
+    def readable(self) -> bool:
+        return True
+
+    def writable(self) -> bool:
+        return False
+
+    def seekable(self) -> bool:
+        return self.fileobj.seekable() and self.metadata['cipher'] == EncryptionCipher.AES_GCM
+
+    def _load_header(self, key=None, keys=None):
+        # Check magic
+        if self.fileobj.read(len(MAGIC)) != MAGIC:
+            raise CryptoError('Invalid header: magic not found')
+
+        # Read metadata
+        metadata_buffer = bytearray()
+        while True:
+            b = self.fileobj.read(1)
+            if not b:
+                raise CryptoError('Invalid header: missing or corrupted metadata')
+            elif b == b'\x00':
+                break
+            else:
+                metadata_buffer.extend(b)
+
+        # Decode metadata
+        try:
+            self.metadata = json.loads(metadata_buffer)
+            self.metadata['cipher'] = EncryptionCipher(self.metadata['cipher'])
+            self.metadata['nonce'] = base64.b64decode(self.metadata['nonce'])
+
+            if key:
+                self.metadata['key'] = key
+            elif keys:
+                self.metadata['key'] = keys.get(self.metadata['key_id'])
+            else:
+                raise CryptoError('Either a key or multiple available keys must be given')
+        except CryptoError as ex:
+            raise
+        except Exception as ex:
+            raise CryptoError('Failed to load metadata') from ex
+
+        # Check metadata
+        if not self.metadata['key']:
+            raise CryptoError('Metadata contains unknown key_id. Cannot find a suitable key for decryption.')
+        if self.metadata['key'].revoked:
+            raise CryptoError('Key was revoked and cannot be used for decryption anymore.')
+
+        # Initialize cipher
+        try:
+            if self.metadata['key'].cipher == EncryptionCipher.AES_GCM:
+                self.cipher = AES.new(
+                    mode=AES.MODE_GCM,
+                    key=self.metadata['key'].key,
+                    nonce=self.metadata['nonce']
+                )
+            else:
+                raise CryptoError('Unsupported cipher')
+        except Exception as ex:
+            raise CryptoError('Error initializing cipher') from ex
+
+        # Add header to auth tag
+        header = MAGIC + metadata_buffer + b'\x00'
+        self.header_len = len(header)
+        self.cipher.update(header)
+
+        # Buffer auth tag at end
+        self.auth_tag_buffer.extend(readexact(self.fileobj, self.auth_tag_len))
+
+    def read(self, size=-1):
+        # Decrypt data (except auth tag at end of stream)
+        self.auth_tag_buffer.extend(self.fileobj.read(size))
+        res = self.auth_tag_buffer[:-self.auth_tag_len]
+        del self.auth_tag_buffer[:-self.auth_tag_len]
+        return self.cipher.decrypt(res)
+
+    def readinto(self, buf) -> int:
+        val = self.read(len(buf))
+        buf[:len(val)] = val
+        return len(val)
+
+    def tell(self) -> int:
+        return self.fileobj.tell() - self.header_len - len(self.auth_tag_buffer)
+
+    def seek(self, offset: int, whence=io.SEEK_SET) -> int:
+        if not self.seekable():
+            raise io.UnsupportedOperation()
+
+        if whence not in [io.SEEK_SET, io.SEEK_END]:
+            return self.tell()
+
+        # AEAD cipher modes support only linear decryption, no seeking
+        # In order to be able to change the position, we first verify the auth tag to ensure that the ciphertext was not modified
+        self._verify_auth_tag()
+        self.auth_tag_buffer.clear()
+
+        # Then seek to the desired position in the file
+        if whence == io.SEEK_SET:
+            pos_absolute = self.fileobj.seek(offset + self.header_len, whence)
+        elif whence == io.SEEK_END:
+            pos_absolute = self.fileobj.seek(0, whence)
+            pos_absolute = self.fileobj.seek(pos_absolute - self.auth_tag_len, io.SEEK_SET)
+
+        # Align position in ciphertext to cipher blocks
+        pos_in_ciphertext = pos_absolute - self.header_len
+        num_blocks_skip = pos_in_ciphertext // self.cipher.block_size
+        align_block_skip = pos_in_ciphertext % self.cipher.block_size
+        self.fileobj.seek(pos_absolute - align_block_skip, io.SEEK_SET)
+
+        # Then we can use a regular CTR mode for decryption. CTR mode supports encrypting/decrypting arbitrary blocks.
+        # We need to initialize the CTR mode with the correct nonce/IV. They need to be calculated the same way as for the GCM mode.
+        self.cipher = self._init_seek_cipher_aes_gcm(key=self.metadata['key'].key, nonce=self.metadata['nonce'], skip_blocks=num_blocks_skip)
+
+        # Finally we can move from the block boundary to the final position
+        self.auth_tag_buffer.clear()
+        self.auth_tag_buffer.extend(readexact(self.fileobj, self.auth_tag_len))
+        self.read(align_block_skip)
+
+        return self.tell()
+
+    def _init_seek_cipher_aes_gcm(self, key, nonce, skip_blocks):
+        """
+        Initializes a new AES CTR cipher at a given block offset.
+        Counter calculation is compatible with AES GCM.
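+        GCM internally encrypts with CTR mode starting at counter block inc32(J0), so decryption can resume at any 16-byte block by recomputing that counter for the target block offset.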
+ GCM CTR cipher initialized code is taken from Cryptodome.Cipher._mode_gcm.GcmMode.__init__ + """ + # Step 1 in SP800-38D, Algorithm 4 (encryption) - Compute H + # See also Algorithm 5 (decryption) + hash_subkey = AES.new(mode=AES.MODE_ECB, key=key).encrypt(b'\x00' * 16) + + # Step 2 - Compute J0 + if len(nonce) == 12: + j0 = nonce + b"\x00\x00\x00\x01" + else: + fill = (16 - (len(nonce) % 16)) % 16 + 8 + ghash_in = (nonce + + b'\x00' * fill + + long_to_bytes(8 * len(nonce), 8)) + j0 = _GHASH(hash_subkey, _ghash_clmul or _ghash_portable).update(ghash_in).digest() + + # Step 3 - Prepare GCTR cipher for encryption/decryption + nonce_ctr = j0[:12] + iv_ctr = (bytes_to_long(j0) + 1 + skip_blocks) & 0xFFFFFFFF + return AES.new( + mode=AES.MODE_CTR, + key=key, + initial_value=iv_ctr, + nonce=nonce_ctr) + + def _verify_auth_tag(self): + if self.auth_tag_verified: + return + + try: + # Read everything to update the internal auth tag calculation + while _ := self.read(): + pass + + self.cipher.verify(self.auth_tag_buffer) + self.auth_tag_verified = True + except Exception as ex: + raise CryptoError('Auth tag verification failed') from ex + + def close(self): + try: + self._verify_auth_tag() + finally: + super().close() + + diff --git a/api/src/reportcreator_api/archive/crypto/fields.py b/api/src/reportcreator_api/archive/crypto/fields.py new file mode 100644 index 0000000..3ba766d --- /dev/null +++ b/api/src/reportcreator_api/archive/crypto/fields.py @@ -0,0 +1,124 @@ +import io +import json + +import elasticapm +from django.db import models +from django.core import checks + +from reportcreator_api.archive.crypto import base as crypto + + +class EncryptedField(models.BinaryField): + def __init__(self, base_field, editable=True, *args, **kwargs) -> None: + self.base_field = base_field + super().__init__(editable=editable, *args, **kwargs) + + @property + def model(self): + try: + return self.__dict__["model"] + except KeyError: + raise AttributeError( + "'%s' object has no attribute 'model'" % self.__class__.__name__ + ) + + @model.setter + def model(self, model): + self.__dict__["model"] = model + self.base_field.model = model + + def check(self, **kwargs): + errors = super().check(**kwargs) + if self.base_field.remote_field: + errors.append( + checks.Error( + "Base field for EncryptedField cannot be a related field.", + obj=self, + ) + ) + else: + # Remove the field name checks as they are not needed here. 
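+            # Run the base field's own system checks and surface its errors under this EncryptedField.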
+ base_errors = self.base_field.check() + if base_errors: + messages = "\n ".join( + "%s (%s)" % (error.msg, error.id) for error in base_errors + ) + errors.append( + checks.Error( + "Base field for EncryptedField has errors:\n %s" % messages, + obj=self, + ) + ) + return errors + + def set_attributes_from_name(self, name): + super().set_attributes_from_name(name) + self.base_field.set_attributes_from_name(name) + + @property + def description(self): + return 'Encrypted ' + self.base_field.description + + def deconstruct(self): + name, path, args, kwargs = super().deconstruct() + kwargs.update({ + "base_field": self.base_field.clone(), + }) + return name, path, args, kwargs + + @elasticapm.capture_span() + def get_db_prep_value(self, value, connection, prepared=False): + if value is None: + return value + + if isinstance(self.base_field, models.JSONField): + value = json.dumps(value, cls=self.base_field.encoder).encode() + elif isinstance(self.base_field, models.BinaryField): + pass + else: + value = self.base_field.get_db_prep_value(value=value, connection=connection, prepared=prepared) + if isinstance(value, bytes): + pass + elif isinstance(value, str): + value = value.encode() + else: + value = str(value).encode() + + enc = io.BytesIO() + with crypto.open(fileobj=enc, mode='wb') as c: + c.write(value) + value = enc.getvalue() + + return super().get_db_prep_value(value=value, connection=connection, prepared=prepared) + + @elasticapm.capture_span() + def from_db_value(self, value, expression, connection): + value = super().to_python(value) + + if isinstance(value, (bytes, memoryview)): + with crypto.open(fileobj=io.BytesIO(value), mode='rb') as c: + value = crypto.readall(c) + if not isinstance(self.base_field, models.BinaryField): + value = value.decode() + if hasattr(self.base_field, 'from_db_value'): + value = self.base_field.from_db_value(value=value, expression=expression, connection=connection) + return self.base_field.to_python(value) + + def to_python(self, value): + return self.base_field.to_python(value) + + def value_to_string(self, obj): + return self.base_field.value_to_string(obj) + + def value_from_object(self, obj): + return self.base_field.value_from_object(obj) + + def formfield(self, **kwargs): + return self.base_field.formfield(**kwargs) + + def has_default(self) -> bool: + return self.base_field.has_default() + + def get_default(self): + return self.base_field.get_default() + diff --git a/api/src/reportcreator_api/archive/crypto/pgp.py b/api/src/reportcreator_api/archive/crypto/pgp.py new file mode 100644 index 0000000..7de6bf3 --- /dev/null +++ b/api/src/reportcreator_api/archive/crypto/pgp.py @@ -0,0 +1,55 @@ +from contextlib import contextmanager +import tempfile +import gnupg + +from reportcreator_api.archive.crypto.base import CryptoError + + +@contextmanager +def create_gpg(): + with tempfile.TemporaryDirectory() as d: + gpg = gnupg.GPG(gnupghome=d) + gpg.encoding = 'utf-8' + yield gpg + + +def public_key_info(public_key: str): + if not public_key: + raise CryptoError('No public key provided') + + with create_gpg() as gpg: + with tempfile.NamedTemporaryFile(mode='w') as f: + f.write(public_key) + f.flush() + res = gpg.scan_keys(f.name) + if len(res) == 0: + raise CryptoError('Invalid public key format') + if len(res) != 1: + raise CryptoError('Only 1 public key allowed') + key_info = res[0] + + if key_info.get('type') != 'pub': + raise CryptoError('Not a public key') + encryption_key_info = next(filter(lambda s: s.get('type') == 'sub' and s.get('cap') == 'e', 
key_info['subkey_info'].values()), None) + if not encryption_key_info: + raise CryptoError('No encryption key provided') + + # Allowed encryption ciphers: RSA, ECDH, ElGamal with min. key size + if encryption_key_info['algo'] not in ['1', '2', '16', '18']: + raise CryptoError('Unsupported algorithm') + if encryption_key_info['algo'] in ['1', '2', '16'] and int(encryption_key_info['length']) < 3072: + raise CryptoError('Key length too short. The minimum supported RSA key size is 3072 bit') + elif encryption_key_info['algo'] in ['18'] and int(encryption_key_info['length']) < 256: + raise CryptoError('Key length too short. The minimum supported Elliptic Curve size is 256 bit') + + return key_info + + +def encrypt(data: bytes, public_key: str): + with create_gpg() as gpg: + res = gpg.import_keys(public_key) + enc = gpg.encrypt(data=data, recipients=[res.results[0]['fingerprint']], always_trust=True) + if not enc.ok: + raise CryptoError('Encryption failed') + return enc.data.decode() + diff --git a/api/src/reportcreator_api/archive/crypto/secret_sharing.py b/api/src/reportcreator_api/archive/crypto/secret_sharing.py new file mode 100644 index 0000000..dbf5088 --- /dev/null +++ b/api/src/reportcreator_api/archive/crypto/secret_sharing.py @@ -0,0 +1,52 @@ +from Cryptodome.Protocol.SecretSharing import Shamir + + +SHAMIR_BLOCK_SIZE = 16 + + +class ShamirLarge(Shamir): + """ + Shamir's secret sharing scheme with support for secrets larger than 128 bit. + Code taken from unmerged PR: https://github.com/Legrandin/pycryptodome/pull/593/files + """ + + @staticmethod + def split_large(k, n, secret, ssss=False): + """ + Wrapper for Shamir.split() + when len(key) > SHAMIR_BLOCK_SIZE (16) + """ + if not isinstance(secret, bytes): + raise TypeError("Secret must be bytes") + if len(secret) % SHAMIR_BLOCK_SIZE != 0: + raise ValueError(f"Secret size must be a multiple of {SHAMIR_BLOCK_SIZE}") + + blocks = len(secret) // SHAMIR_BLOCK_SIZE + shares = [b'' for _ in range(n)] + for i in range(blocks): + block_shares = Shamir.split(k, n, + secret[i*SHAMIR_BLOCK_SIZE:(i+1)*SHAMIR_BLOCK_SIZE], ssss) + for j in range(n): + shares[j] += block_shares[j][1] + return [(i+1,shares[i]) for i in range(n)] + + @staticmethod + def combine_large(shares, ssss=False): + """ + Wrapper for Shamir.combine() + when len(key) > SHAMIR_BLOCK_SIZE (16) + """ + share_len = len(shares[0][1]) + for share in shares: + if len(share[1]) % SHAMIR_BLOCK_SIZE: + raise ValueError(f"Share #{share[0]} is not a multiple of {SHAMIR_BLOCK_SIZE}") + if len(share[1]) != share_len: + raise ValueError("Share sizes are inconsistent") + blocks = share_len // SHAMIR_BLOCK_SIZE + result = b'' + for i in range(blocks): + block_shares = [ + (int(idx), share[i*SHAMIR_BLOCK_SIZE:(i+1)*SHAMIR_BLOCK_SIZE]) + for idx, share in shares] + result += Shamir.combine(block_shares, ssss) + return result diff --git a/api/src/reportcreator_api/archive/crypto/storage.py b/api/src/reportcreator_api/archive/crypto/storage.py new file mode 100644 index 0000000..b6c5f7e --- /dev/null +++ b/api/src/reportcreator_api/archive/crypto/storage.py @@ -0,0 +1,71 @@ +import io +from typing import Iterator +from uuid import uuid4 +from django.core.files import File +from reportcreator_api.archive.crypto import base as crypto + + +class IterableToFileAdapter(File): + def __init__(self, iterable, name=None) -> None: + super().__init__(file=None, name=name) + self.iterator = iter(iterable) + self.buffer = b'' + + def read(self, size=-1): + while len(self.buffer) < size or size == -1: + try: + 
self.buffer += next(self.iterator) + except StopIteration: + break + + out = self.buffer[:size] + self.buffer = self.buffer[size:] + return out + + @property + def closed(self) -> bool: + return False + + def seekable(self) -> bool: + return False + + +class EncryptedFileAdapter(File): + def __init__(self, file, name=None, **kwargs) -> None: + self._original_file = file + self._crypto_kwargs = kwargs + super().__init__(IterableToFileAdapter(self._encrypted_chunks(file), name or file.name)) + + def _encrypted_chunks(self, file, chunk_size=None): + buf = io.BytesIO() + with crypto.open(fileobj=buf, mode='wb', **self._crypto_kwargs) as c: + for b in file.chunks(chunk_size=chunk_size): + c.write(b) + yield buf.getvalue() + buf.truncate(0) + buf.seek(0) + yield buf.getvalue() + + def chunks(self, chunk_size=None) -> Iterator[bytes]: + return self._encrypted_chunks(self._original_file, chunk_size) + + +class EncryptedStorageMixin: + def open(self, name, mode='rb', **kwargs): + return File(file=crypto.open(fileobj=super().open(name=name, mode=mode, **kwargs), mode=mode), name=name) + + def save(self, name, content, max_length=None): + return super().save(name=str(uuid4()), content=EncryptedFileAdapter(file=File(content)), max_length=max_length) + + def size(self, name): + size = super().size(name) + with crypto.open(fileobj=super().open(name=name, mode='rb'), mode='r') as c: + if hasattr(c, 'header_len') and hasattr(c, 'auth_tag_len'): + size -= c.header_len + c.auth_tag_len + return size + + def get_available_name(self, name, max_length=None): + randname = str(uuid4()) + randname_with_dir = randname[:2] + '/' + randname[2:] + return super().get_available_name(name=randname_with_dir, max_length=None) + diff --git a/api/src/reportcreator_api/archive/import_export/__init__.py b/api/src/reportcreator_api/archive/import_export/__init__.py new file mode 100644 index 0000000..d5de345 --- /dev/null +++ b/api/src/reportcreator_api/archive/import_export/__init__.py @@ -0,0 +1,8 @@ +from .import_export import export_project_types, export_projects, export_templates, \ + import_project_types, import_projects, import_templates + + +__all__ = [ + 'export_project_types', 'export_projects', 'export_templates', + 'import_project_types', 'import_projects', 'import_templates', +] diff --git a/api/src/reportcreator_api/archive/import_export/import_export.py b/api/src/reportcreator_api/archive/import_export/import_export.py new file mode 100644 index 0000000..6e649ab --- /dev/null +++ b/api/src/reportcreator_api/archive/import_export/import_export.py @@ -0,0 +1,186 @@ +import copy +import io +import json +import logging +from pathlib import Path +import tarfile +from typing import Iterable, Type +from django.http import FileResponse +from rest_framework import serializers +from django.db import transaction +from django.db.models import prefetch_related_objects, Prefetch +from django.core.serializers.json import DjangoJSONEncoder + +from reportcreator_api.archive.import_export.serializers import FindingTemplateExportImportSerializer, PentestProjectExportImportSerializer, ProjectTypeExportImportSerializer +from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectMemberInfo, ProjectType, ReportSection + + +log = logging.getLogger(__name__) + + +BLOCKSIZE = FileResponse.block_size + + +def build_tarinfo(name, size): + info = tarfile.TarInfo(name=name) + info.size = size + return info + + +def _yield_chunks(buffer: io.BytesIO, last_chunk=False): + """ + Split 
buffer in chunks of BLOCKSIZE and yield them. + Removes the returned chunks form the buffer. + If last_chunks=True, return the last chunk even if it is less than BLOCKSIZE + """ + val = buffer.getvalue() + buffer.truncate(0) + buffer.seek(0) + + num_chunks, len_remaining = divmod(len(val), BLOCKSIZE) + for i in range(num_chunks): + yield val[i * BLOCKSIZE:(i + 1) * BLOCKSIZE] + + if len_remaining > 0: + remaining = val[-len_remaining:] + if last_chunk: + yield remaining + else: + buffer.write(remaining) + + +def _tarfile_addfile(buffer, archive: tarfile.TarFile, tarinfo, file_chunks) -> Iterable[bytes]: + """ + Re-implementation of TarFile.addfile() that yields chunks to integrate into Django StreamingHttpResponse + """ + archive._check("awx") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(archive.format, archive.encoding, archive.errors) + archive.fileobj.write(buf) + archive.offset += len(buf) + + # re-implemented copyfileobj with yield after each block + for chunk in file_chunks: + archive.fileobj.write(chunk) + yield from _yield_chunks(buffer) + + blocks, remainder = divmod(tarinfo.size, tarfile.BLOCKSIZE) + if remainder > 0: + archive.fileobj.write(tarfile.NUL * (tarfile.BLOCKSIZE - remainder)) + blocks += 1 + archive.offset += blocks * tarfile.BLOCKSIZE + yield from _yield_chunks(buffer) + + archive.members.append(tarinfo) + + +def export_archive_iter(data, serializer_class: Type[serializers.Serializer], context=None) -> Iterable[bytes]: + try: + buffer = io.BytesIO() + + with tarfile.open(fileobj=buffer, mode='w|gz') as archive: + context = (context or {}) | { + 'archive': archive, + } + for obj in data: + serializer = serializer_class(instance=obj, context=context) + data = serializer.export() + archive_data = json.dumps(data, cls=DjangoJSONEncoder).encode() + yield from _tarfile_addfile( + buffer=buffer, + archive=archive, + tarinfo=build_tarinfo(name=f'{obj.id}.json', size=len(archive_data)), + file_chunks=[archive_data] + ) + + for name, file in serializer.export_files(): + yield from _tarfile_addfile( + buffer=buffer, + archive=archive, + tarinfo=build_tarinfo(name=name, size=file.size), + file_chunks=file.chunks() + ) + + yield from _yield_chunks(buffer=buffer, last_chunk=True) + except Exception as ex: + logging.exception('Error while exporting archive') + raise ex + + +@transaction.atomic() +def import_archive(archive_file, serializer_class: Type[serializers.Serializer]): + context = { + 'archive': None, + 'storage_files': [], + } + + try: + # We cannot use the streaming mode for import, because random access is required for importing files referenced in JSON + # However, the tarfile library does not load everything into memory at once, only the archive member metadata (e.g. 
filename) + # File contents are loaded only when reading them, but file reading can be streamed + with tarfile.open(fileobj=archive_file, mode='r') as archive: + context['archive'] = archive + + # Get JSON files to import + to_import = [] + for m in archive.getmembers(): + mp = Path(m.name) + if m.isfile() and mp.match('*.json') and not mp.parent.parts: + to_import.append(m.name) + + # Perform import + # The actual work is performed in serializers + imported_objects = [] + for m in to_import: + serializer = serializer_class(data=json.load(archive.extractfile(m)), context=context) + serializer.is_valid(raise_exception=True) + obj = serializer.perform_import() + log.info(f'Imported object {obj=} {obj.id=}') + imported_objects.append(obj) + + return imported_objects + except Exception as ex: + # Rollback partially imported data. DB rollback is done in the decorator + log.exception('Error while importing archive. Rolling back import.') + + for f in context.get('storage_files', []): + try: + f.delete() + except Exception: + log.exception(f'Failed to delete imported file "{f.name}" during rollback') + raise ex + + +def export_templates(data: Iterable[FindingTemplate]): + return export_archive_iter(data, serializer_class=FindingTemplateExportImportSerializer) + +def export_project_types(data: Iterable[ProjectType]): + prefetch_related_objects(data, 'assets') + return export_archive_iter(data, serializer_class=ProjectTypeExportImportSerializer) + +def export_projects(data: Iterable[PentestProject], export_all=False): + prefetch_related_objects( + data, + Prefetch('findings', PentestFinding.objects.select_related('assignee')), + Prefetch('sections', ReportSection.objects.select_related('assignee')), + Prefetch('notes', NotebookPage.objects.select_related('parent')), + Prefetch('members', ProjectMemberInfo.objects.select_related('user')), + 'images', + 'project_type__assets', + ) + return export_archive_iter(data, serializer_class=PentestProjectExportImportSerializer, context={ + 'export_all': export_all, + }) + + +def import_templates(archive_file): + return import_archive(archive_file, serializer_class=FindingTemplateExportImportSerializer) + +def import_project_types(archive_file): + return import_archive(archive_file, serializer_class=ProjectTypeExportImportSerializer) + +def import_projects(archive_file): + return import_archive(archive_file, serializer_class=PentestProjectExportImportSerializer) + diff --git a/api/src/reportcreator_api/archive/import_export/serializers.py b/api/src/reportcreator_api/archive/import_export/serializers.py new file mode 100644 index 0000000..c9e8b06 --- /dev/null +++ b/api/src/reportcreator_api/archive/import_export/serializers.py @@ -0,0 +1,405 @@ +from typing import Iterable +from django.core.files import File +from django.core.exceptions import ObjectDoesNotExist +from rest_framework import serializers +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure + +from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectType, ReportSection, \ + SourceEnum, UploadedAsset, UploadedImage, UploadedFileBase, ProjectMemberInfo, UploadedProjectFile +from reportcreator_api.pentests.serializers import ProjectMemberInfoSerializer +from reportcreator_api.users.models import PentestUser +from reportcreator_api.users.serializers import RelatedUserSerializer +from reportcreator_api.utils.utils import omit_keys + + +class 
ExportImportSerializer(serializers.ModelSerializer): + def perform_import(self): + return self.create(self.validated_data.copy()) + + def export(self): + return self.data + + def export_files(self) -> Iterable[tuple[str, File]]: + return [] + + +class FormatField(serializers.Field): + def __init__(self, format): + self.format = format + self.default_validators = [self._validate_format] + super().__init__() + + def _validate_format(self, v): + if v != self.format: + raise serializers.ValidationError(f'Invalid format: expected "{self.format}" got "{v}"') + else: + raise serializers.SkipField() + + def get_attribute(self, instance): + return self.format + + def to_representation(self, value): + return value + + def to_internal_value(self, value): + return value + + +class UserIdSerializer(serializers.ModelSerializer): + class Meta: + model = PentestUser + fields = ['id'] + + +class RelatedUserIdExportImportSerializer(RelatedUserSerializer): + def __init__(self, **kwargs): + super().__init__(user_serializer=UserIdSerializer, **{'required': False, 'allow_null': True, 'default': None} | kwargs) + + def to_internal_value(self, data): + try: + return super().to_internal_value(data) + except PentestUser.DoesNotExist: + # If user does not exit: ignore + raise serializers.SkipField() + + +class UserDataSerializer(serializers.ModelSerializer): + class Meta: + model = PentestUser + fields = [ + 'id', 'email', 'phone', 'mobile', + 'name', 'title_before', 'first_name', 'middle_name', 'last_name', 'title_after', + ] + extra_kwargs = {'id': {'read_only': False}} + + +class RelatedUserDataExportImportSerializer(ProjectMemberInfoSerializer): + def __init__(self, **kwargs): + super().__init__(user_serializer=UserDataSerializer, **kwargs) + + def to_internal_value(self, data): + try: + return ProjectMemberInfo(**super().to_internal_value(data)) + except PentestUser.DoesNotExist: + return data + + +class ProjectMemberListExportImportSerializer(serializers.ListSerializer): + child = RelatedUserDataExportImportSerializer() + + def to_representation(self, project): + return super().to_representation(project.members.all()) + project.imported_members + + def to_internal_value(self, data): + return {self.field_name: super().to_internal_value(data)} + + +class OptionalPrimaryKeyRelatedField(serializers.PrimaryKeyRelatedField): + def __init__(self, **kwargs): + super().__init__(**{'required': False, 'allow_null': True, 'default': None} | kwargs) + + def to_internal_value(self, data): + if data is None: + raise serializers.SkipField() + try: + return self.get_queryset().get(pk=data) + except ObjectDoesNotExist: + raise serializers.SkipField() + + +class FindingTemplateExportImportSerializer(ExportImportSerializer): + format = FormatField('templates/v1') + + data = serializers.DictField(source='data_all') + + class Meta: + model = FindingTemplate + fields = ['format', 'id', 'created', 'updated', 'tags', 'language', 'status', 'data'] + extra_kwargs = {'id': {'read_only': True}, 'created': {'read_only': False}} + + def create(self, validated_data): + data = validated_data.pop('data_all', {}) + template = FindingTemplate(**{ + 'source': SourceEnum.IMPORTED, + } | validated_data) + template.update_data(data) + template.save() + return template + + +class FileListExportImportSerializer(serializers.ListSerializer): + def export_files(self): + for e in self.instance: + self.child.instance = e + yield from self.child.export_files() + + def extract_file(self, name): + return 
self.context['archive'].extractfile(self.child.get_path_in_archive(name)) + + def create(self, validated_data): + child_model_class = self.child.Meta.model + objs = [ + child_model_class(**attrs | { + 'name_hash': UploadedFileBase.hash_name(attrs['name']), + 'file': File( + file=self.extract_file(attrs['name']), + name=attrs['name']), + 'linked_object': self.child.get_linked_object() + }) for attrs in validated_data] + + child_model_class.objects.bulk_create(objs) + self.context['storage_files'].extend(map(lambda o: o.file, objs)) + return objs + + +class FileExportImportSerializer(ExportImportSerializer): + class Meta: + fields = ['id', 'created', 'updated', 'name'] + extra_kwargs = {'id': {'read_only': True}, 'created': {'read_only': False}} + list_serializer_class = FileListExportImportSerializer + + def validate_name(self, name): + if '/' in name or '\\' in name or '\x00' in name: + raise serializers.ValidationError(f'Invalid filename: {name}') + return name + + def get_linked_object(self): + pass + + def get_path_in_archive(self, name): + pass + + def export_files(self) -> Iterable[tuple[str, File]]: + yield self.get_path_in_archive(self.instance.name), self.instance.file + + +class UploadedImageExportImportSerializer(FileExportImportSerializer): + class Meta(FileExportImportSerializer.Meta): + model = UploadedImage + + def get_linked_object(self): + return self.context['project'] + + def get_path_in_archive(self, name): + # Get ID of old project_type from archive + return str(self.context.get('project_id') or self.get_linked_object().id) + '-images/' + name + + +class UploadedProjectFileExportImportSerializer(FileExportImportSerializer): + class Meta(FileExportImportSerializer.Meta): + model = UploadedProjectFile + + def get_linked_object(self): + return self.context['project'] + + def get_path_in_archive(self, name): + # Get ID of old project_type from archive + return str(self.context.get('project_id') or self.get_linked_object().id) + '-files/' + name + + +class UploadedAssetExportImportSerializer(FileExportImportSerializer): + class Meta(FileExportImportSerializer.Meta): + model = UploadedAsset + + def get_linked_object(self): + return self.context['project_type'] + + def get_path_in_archive(self, name): + # Get ID of old project_type from archive + return str(self.context.get('project_type_id') or self.get_linked_object().id) + '-assets/' + name + + +class ProjectTypeExportImportSerializer(ExportImportSerializer): + format = FormatField('projecttypes/v1') + assets = UploadedAssetExportImportSerializer(many=True) + + class Meta: + model = ProjectType + fields = [ + 'format', 'id', 'created', 'updated', 'name', 'language', + 'report_fields', 'report_sections', 'finding_fields', 'finding_field_order', + 'report_template', 'report_styles', 'report_preview_data', + 'assets' + ] + extra_kwargs = {'id': {'read_only': False}, 'created': {'read_only': False}} + + def export_files(self) -> Iterable[tuple[str, File]]: + af = self.fields['assets'] + self.context.update({'project_type': self.instance}) + af.instance = list(af.get_attribute(self.instance).all()) + yield from af.export_files() + + def create(self, validated_data): + old_id = validated_data.pop('id') + assets = validated_data.pop('assets', []) + project_type = super().create({ + 'source': SourceEnum.IMPORTED, + } | validated_data) + self.context.update({'project_type': project_type, 'project_type_id': old_id}) + self.fields['assets'].create(assets) + return project_type + + +class 
PentestFindingExportImportSerializer(ExportImportSerializer): + id = serializers.UUIDField(source='finding_id') + assignee = RelatedUserIdExportImportSerializer() + template = OptionalPrimaryKeyRelatedField(queryset=FindingTemplate.objects.all(), source='template_id') + data = serializers.DictField(source='data_all') + + class Meta: + model = PentestFinding + fields = [ + 'id', 'created', 'updated', 'assignee', 'status', 'template', 'data', + ] + extra_kwargs = {'created': {'read_only': False}} + + def create(self, validated_data): + project = self.context['project'] + data = validated_data.pop('data_all', {}) + template = validated_data.pop('template_id', None) + finding = PentestFinding(**{ + 'project': project, + 'template_id': template.id if template else None, + } | validated_data) + finding.update_data(ensure_defined_structure( + value=data, + definition=project.project_type.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_NONE, + include_undefined=True) + ) + finding.save() + return finding + + +class ReportSectionExportImportSerializer(ExportImportSerializer): + id = serializers.CharField(source='section_id') + assignee = RelatedUserIdExportImportSerializer() + + class Meta: + model = ReportSection + fields = [ + 'id', 'created', 'updated', 'assignee', 'status', + ] + extra_kwargs = {'created': {'read_only': False}} + + +class NotebookPageExportImportSerializer(ExportImportSerializer): + id = serializers.UUIDField(source='note_id') + parent = serializers.UUIDField(source='parent.note_id', allow_null=True) + + class Meta: + model = NotebookPage + fields = [ + 'id', 'created', 'updated', + 'title', 'text', 'checked', 'icon_emoji', 'status_emoji', + 'order', 'parent', + ] + extra_kwargs = { + 'created': {'read_only': False}, + 'icon_emoji': {'required': False}, + 'status_emoji': {'required': False}, + } + + +class NotebookPageListExportImportSerializer(serializers.ListSerializer): + child = NotebookPageExportImportSerializer() + + def create(self, validated_data): + instances = [NotebookPage(project=self.context['project'], **omit_keys(d, ['parent'])) for d in validated_data] + for i, d in zip(instances, validated_data): + if d.get('parent'): + i.parent = next(filter(lambda e: e.note_id == d.get('parent', {}).get('note_id'), instances), None) + + NotebookPage.objects.check_parent_and_order(instances) + NotebookPage.objects.bulk_create(instances) + return instances + + +class PentestProjectExportImportSerializer(ExportImportSerializer): + format = FormatField('projects/v1') + members = ProjectMemberListExportImportSerializer(source='*', required=False) + pentesters = ProjectMemberListExportImportSerializer(required=False, write_only=True) + project_type = ProjectTypeExportImportSerializer() + report_data = serializers.DictField(source='data_all') + sections = ReportSectionExportImportSerializer(many=True) + findings = PentestFindingExportImportSerializer(many=True) + notes = NotebookPageListExportImportSerializer(required=False) + images = UploadedImageExportImportSerializer(many=True) + files = UploadedProjectFileExportImportSerializer(many=True, required=False) + + class Meta: + model = PentestProject + fields = [ + 'format', 'id', 'created', 'updated', 'name', 'language', + 'members', 'pentesters', 'project_type', + 'report_data', 'sections', 'findings', 'notes', 'images', 'files', + ] + extra_kwargs = {'id': {'read_only': False}, 'created': {'read_only': False}} + + def get_fields(self): + fields = super().get_fields() + if not self.context.get('export_all', 
True): + del fields['notes'] + del fields['files'] + return fields + + def export_files(self) -> Iterable[tuple[str, File]]: + self.fields['project_type'].instance = self.instance.project_type + yield from self.fields['project_type'].export_files() + + self.context.update({'project': self.instance}) + + imgf = self.fields['images'] + imgf.instance = list(imgf.get_attribute(self.instance).all()) + yield from imgf.export_files() + + if ff := self.fields.get('files'): + ff.instance = list(ff.get_attribute(self.instance).all()) + yield from ff.export_files() + + def create(self, validated_data): + old_id = validated_data.pop('id') + members = validated_data.pop('members', validated_data.pop('pentesters', [])) + project_type_data = validated_data.pop('project_type', {}) + sections = validated_data.pop('sections', []) + findings = validated_data.pop('findings', []) + notes = validated_data.pop('notes', []) + report_data = validated_data.pop('data_all', {}) + images_data = validated_data.pop('images', []) + files_data = validated_data.pop('files', []) + + project_type = self.fields['project_type'].create(project_type_data | { + 'source': SourceEnum.IMPORTED_DEPENDENCY, + }) + project = super().create(validated_data | { + 'project_type': project_type, + 'imported_members': list(filter(lambda u: isinstance(u, dict), members)), + 'source': SourceEnum.IMPORTED, + 'custom_fields': ensure_defined_structure( + value=report_data, + definition=project_type.report_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_NONE, + include_undefined=True + ), + }) + project_type.linked_project = project + project_type.save() + + member_infos = list(filter(lambda u: isinstance(u, ProjectMemberInfo), members)) + for mi in member_infos: + mi.project = project + ProjectMemberInfo.objects.bulk_create(member_infos) + + self.context.update({'project': project, 'project_id': old_id}) + + for section in project.sections.all(): + if section_data := next(filter(lambda s: s.get('section_id') == section.section_id, sections), None): + self.fields['sections'].child.update(section, section_data) + + self.fields['findings'].create(findings) + self.fields['notes'].create(notes) + self.fields['images'].create(images_data) + self.fields['files'].create(files_data) + + return project + diff --git a/api/src/reportcreator_api/conf/admin.py b/api/src/reportcreator_api/conf/admin.py new file mode 100644 index 0000000..5bfb045 --- /dev/null +++ b/api/src/reportcreator_api/conf/admin.py @@ -0,0 +1,12 @@ +from django.contrib.admin.apps import AdminConfig as AdminConfigBase +from django.contrib.admin.sites import AdminSite as AdminSiteBase + + +class AdminConfig(AdminConfigBase): + default_site = 'reportcreator_api.conf.admin.AdminSite' + + +class AdminSite(AdminSiteBase): + def has_permission(self, request): + return request.user and not request.user.is_anonymous and request.user.is_admin + diff --git a/api/src/reportcreator_api/conf/asgi.py b/api/src/reportcreator_api/conf/asgi.py new file mode 100644 index 0000000..a13e846 --- /dev/null +++ b/api/src/reportcreator_api/conf/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for reportcreator_api project. + +It exposes the ASGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings') + +application = get_asgi_application() diff --git a/api/src/reportcreator_api/conf/celery.py b/api/src/reportcreator_api/conf/celery.py new file mode 100644 index 0000000..2a71abe --- /dev/null +++ b/api/src/reportcreator_api/conf/celery.py @@ -0,0 +1,29 @@ +import os +from celery import Celery, signals + + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings') + + +celery_app = Celery( + 'reportcreator', + fixups=Celery.builtin_fixups | { + 'reportcreator_api.tasks.rendering.celery_worker:SecureWorkerFixup' + } +) + +# Using a string here means the worker doesn't have to serialize +# the configuration object to child processes. +# - namespace='CELERY' means all celery-related configuration keys +# should have a `CELERY_` prefix. +celery_app.config_from_object('django.conf:settings', namespace='CELERY') + +# Load task modules from all registered Django apps. +celery_app.autodiscover_tasks() + + +@signals.setup_logging.connect() +def setup_logging(*args, **kwargs): + import logging.config + from django.conf import settings + logging.config.dictConfig(settings.LOGGING) diff --git a/api/src/reportcreator_api/conf/settings.py b/api/src/reportcreator_api/conf/settings.py new file mode 100644 index 0000000..955dfe7 --- /dev/null +++ b/api/src/reportcreator_api/conf/settings.py @@ -0,0 +1,561 @@ +""" +Django settings for reportcreator_api project. + +Generated by 'django-admin startproject' using Django 4.0.4. + +For more information on this file, see +https://docs.djangoproject.com/en/4.0/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/4.0/ref/settings/ +""" + +from datetime import timedelta +from decouple import config +from pathlib import Path +import json +from urllib.parse import urljoin + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent.parent +MEDIA_ROOT = config('MEDIA_ROOT', default=BASE_DIR / 'data', cast=Path) + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = config('SECRET_KEY', default='django-insecure-ygvn9(x==kcv#r%pccf4rlzyz7_1v1b83$19&b2lsj6uz$mbro') + +# SECURITY WARNING: don't run with debug turned on in production! 
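+# DEBUG is read from the environment and defaults to off; enabling it also loads the Django debug toolbar configured at the bottom of this file.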
+DEBUG = config('DEBUG', cast=bool, default=False) + +ALLOWED_HOSTS = ['*'] +APPEND_SLASH = True + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'django.contrib.sessions', + + 'rest_framework', + 'django_filters', + 'adrf', + + 'reportcreator_api', + 'reportcreator_api.users', + 'reportcreator_api.pentests', + 'reportcreator_api.notifications', + 'reportcreator_api.tasks', + 'reportcreator_api.conf.admin.AdminConfig', + 'reportcreator_api.api_utils', +] + +MIDDLEWARE = [ + 'whitenoise.middleware.WhiteNoiseMiddleware', + + 'reportcreator_api.utils.logging.RequestLoggingMiddleware', + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'reportcreator_api.utils.middleware.ExtendSessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'reportcreator_api.utils.middleware.AdminSessionMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'csp.middleware.CSPMiddleware', + 'reportcreator_api.utils.middleware.CacheControlMiddleware', + 'reportcreator_api.utils.middleware.PermissionsPolicyMiddleware', +] + +ROOT_URLCONF = 'reportcreator_api.conf.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [BASE_DIR / 'frontend'], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +REST_FRAMEWORK = { + # Use Django's standard `django.contrib.auth` permissions, + # or allow read-only access for unauthenticated users. 
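+    # Every endpoint requires an authenticated session by default; individual views add further permission classes (e.g. license or role checks) on top.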
+ 'DEFAULT_PERMISSION_CLASSES': [ + 'rest_framework.permissions.IsAuthenticated', + ], + 'DEFAULT_AUTHENTICATION_CLASSES': [ + 'rest_framework.authentication.SessionAuthentication', + ], + 'DEFAULT_THROTTLE_CLASSES': [ + 'reportcreator_api.utils.throttling.ScopedUserRateThrottle', + ], + 'DEFAULT_THROTTLE_RATES': { + 'pdf': '3/10s', + }, + 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.CursorPagination', + 'EXCEPTION_HANDLER': 'reportcreator_api.utils.api.exception_handler', + 'PAGE_SIZE': 100, + 'UNICODE_JSON': False, +} + + +WSGI_APPLICATION = 'reportcreator_api.conf.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/4.0/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': config('DATABASE_ENGINE', default='django.db.backends.postgresql'), + 'HOST': config('DATABASE_HOST', default=''), + 'PORT': config('DATABASE_PORT', default='5432'), + 'NAME': config('DATABASE_NAME', default=''), + 'USER': config('DATABASE_USER', default=''), + 'PASSWORD': config('DATABASE_PASSWORD', default=''), + 'DISABLE_SERVER_SIDE_CURSORS': True, + 'OPTIONS': { + 'prepare_threshold': None, + } + }, +} + + +# Password validation +# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + +# Login URL of SPA frontend +LOGIN_URL = '/login/' + +SESSION_ENGINE = 'reportcreator_api.users.backends.session' +SESSION_EXPIRE_AT_BROWSER_CLOSE = True +SESSION_COOKIE_AGE = timedelta(hours=14).seconds +SESSION_COOKIE_HTTPONLY = True +SESSION_COOKIE_SAMESITE = 'Strict' +CSRF_COOKIE_SAMESITE = 'Strict' +CSRF_TRUSTED_ORIGINS = ['https://*', 'http://*'] + +MFA_SERVER_NAME = config('MFA_SERVER_NAME', default='SysReptor') +# FIDO2 RP ID: the domain name of the instance +MFA_FIDO2_RP_ID = config('MFA_FIDO2_RP_ID', default='') +MFA_LOGIN_TIMEOUT = timedelta(minutes=5) +SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT = timedelta(minutes=15) + +import fido2.features +fido2.features.webauthn_json_mapping.enabled = True + + + +AUTHLIB_OAUTH_CLIENTS = {} +OIDC_AZURE_CLIENT_ID = config('OIDC_AZURE_CLIENT_ID', default=None) +OIDC_AZURE_CLIENT_SECRET = config('OIDC_AZURE_CLIENT_SECRET', default=None) +OIDC_AZURE_TENANT_ID = config('OIDC_AZURE_TENANT_ID', default=None) +if OIDC_AZURE_CLIENT_ID and OIDC_AZURE_CLIENT_SECRET and OIDC_AZURE_TENANT_ID: + AUTHLIB_OAUTH_CLIENTS |= { + 'azure': { + 'label': 'Azure AD', + 'client_id': OIDC_AZURE_CLIENT_ID, + 'client_secret': OIDC_AZURE_CLIENT_SECRET, + 'server_metadata_url': f'https://login.microsoftonline.com/{OIDC_AZURE_TENANT_ID}/v2.0/.well-known/openid-configuration', + 'client_kwargs': { + 'scope': 'openid email profile', + 'code_challenge_method': 'S256', + }, + }, + } +if oidc_config := config('OIDC_AUTHLIB_OAUTH_CLIENTS', cast=json.loads, default="{}"): + AUTHLIB_OAUTH_CLIENTS |= oidc_config + +# Internationalization +# https://docs.djangoproject.com/en/4.0/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/4.0/howto/static-files/ + +MEDIA_URL = 'data/' +STATIC_URL = 'static/' +STATIC_ROOT = BASE_DIR / 'static' 
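+# The built frontend is collected from frontend/static and served through the WhiteNoise middleware configured above.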
+STATICFILES_DIRS = [ + BASE_DIR / 'frontend' / 'static', +] + +UPLOADED_FILE_STORAGE = config('UPLOADED_FILE_STORAGE', default='filesystem') +UPLOADED_FILE_STORAGE = { + 'filesystem': 'reportcreator_api.utils.storages.EncryptedFileSystemStorage', + 's3': 'reportcreator_api.utils.storages.EncryptedS3SystemStorage', +}.get(UPLOADED_FILE_STORAGE, UPLOADED_FILE_STORAGE) + +STORAGES = { + 'staticfiles': { + 'BACKEND': 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage', + }, + 'uploaded_images': { + 'BACKEND': 'reportcreator_api.utils.storages.EncryptedFileSystemStorage', + 'OPTIONS': { + 'location': config('UPLOADED_IMAGE_LOCATION', default=MEDIA_ROOT / 'uploadedimages', cast=Path), + }, + }, + 'uploaded_assets': { + 'BACKEND': 'reportcreator_api.utils.storages.EncryptedFileSystemStorage', + 'OPTIONS': { + 'location': config('UPLOADED_ASSET_LOCATION', default=MEDIA_ROOT / 'uploadedassets', cast=Path) + }, + }, + 'uploaded_files': { + 'BACKEND': UPLOADED_FILE_STORAGE, + 'OPTIONS': { + 'location': config('UPLOADED_FILE_LOCATION', default=MEDIA_ROOT / 'uploadedfiles', cast=Path), + 'access_key': config('UPLOADED_FILE_S3_ACCESS_KEY', default=''), + 'secret_key': config('UPLOADED_FILE_S3_SECRET_KEY', default=''), + 'security_token': config('UPLOADED_FILE_S3_SESSION_TOKEN', default=None), + 'bucket_name': config('UPLOADED_FILE_S3_BUCKET_NAME', default=''), + 'endpoint_url': config('UPLOADED_FILE_S3_ENDPOINT_URL', default=''), + }, + }, + 'archived_files': { + 'BACKEND': config('ARCHIVED_FILE_STORAGE', default='reportcreator_api.utils.storages.UnencryptedFileSystemStorage'), + 'OPTIONS': { + 'location': config('ARCHIVED_FILE_LOCATION', default=MEDIA_ROOT / 'archivedfiles', cast=Path), + 'access_key': config('ARCHIVED_FILE_S3_ACCESS_KEY', default=''), + 'secret_key': config('ARCHIVED_FILE_S3_SECRET_KEY', default=''), + 'security_token': config('ARCHIVED_FILE_S3_SESSION_TOKEN', default=None), + 'bucket_name': config('ARCHIVED_FILE_S3_BUCKET_NAME', default=''), + 'endpoint_url': config('ARCHIVED_FILE_S3_ENDPOINT_URL', default=''), + }, + }, +} + +from pillow_heif import register_heif_opener +register_heif_opener() + + +# Default primary key field type +# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field +DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' +AUTH_USER_MODEL = 'users.PentestUser' + + +# HTTP Header settings +SECURE_CONTENT_TYPE_NOSNIFF = True +SECURE_CROSS_ORIGIN_OPENER_POLICY = 'same-origin' +SECURE_REFERRER_POLICY = 'same-origin' +X_FRAME_OPTIONS = 'SAMEORIGIN' + +CSP_DEFAULT_SRC = ["'none'"] +CSP_IMG_SRC = ["'self'", "data:"] +CSP_FONT_SRC = ["'self'"] +CSP_WORKER_SRC = ["'self'"] +CSP_CONNECT_SRC = ["'self'"] +CSP_FRAME_SRC = ["'self'"] +CSP_FRAME_ANCESTORS = ["'self'"] +# nuxt, vuetify and markdown preview use inline styles +CSP_STYLE_SRC = ["'self'", "'unsafe-inline'"] +# unsafe-inline: +# Django Rest Framework inserts the CSRF token via an inline script. 
DRF will be CSP-compliant in version 3.15 (see https://github.com/encode/django-rest-framework/pull/8784) +# NuxtJS injects a inline script in index.html +# unsafe-eval: +# Used by nuxt-vuex-localstorage; PR exists, but maintainer is not very active (see https://github.com/rubystarashe/nuxt-vuex-localstorage/issues/37) +CSP_SCRIPT_SRC = ["'self'", "'unsafe-inline'", "'unsafe-eval'"] + +PERMISSIONS_POLICY = { + 'publickey-credentials-get': '(self)', + 'clipboard-write': '(self)', + 'accelerometer': '()', + 'ambient-light-sensor': '()', + 'autoplay': '()', + 'battery': '()', + 'camera': '()', + 'cross-origin-isolated': '()', + 'display-capture': '()', + 'document-domain': '()', + 'encrypted-media': '()', + 'execution-while-not-rendered': '()', + 'execution-while-out-of-viewport': '()', + 'fullscreen': '()', + 'geolocation': '()', + 'gyroscope': '()', + 'keyboard-map': '()', + 'magnetometer': '()', + 'microphone': '()', + 'midi': '()', + 'navigation-override': '()', + 'payment': '()', + 'picture-in-picture': '()', + 'screen-wake-lock': '()', + 'sync-xhr': '()', + 'usb': '()', + 'web-share': '()', + 'xr-spatial-tracking': '()', + 'clipboard-read': '()', + 'gamepad': '()', + 'speaker-selection': '()', +} + + +# Generate HTTPS URIs in responses for requests behind a reverse proxy +SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') + + +# Monkey-Patch django to disable CSRF everywhere +# CSRF middlware class is used as middleware and internally by DjangoRestFramework +from django.middleware import csrf +from reportcreator_api.utils.middleware import CustomCsrfMiddleware +csrf.CsrfViewMiddleware = CustomCsrfMiddleware + + +PDF_RENDER_SCRIPT_PATH = config('PDF_RENDER_SCRIPT_PATH', cast=Path, default=BASE_DIR / '..' / 'rendering' / 'dist' / 'bundle.js') +CHROMIUM_EXECUTABLE = config('CHROMIUM_EXECUTABLE', default=None) + + +# Celery client settings +CELERY_BROKER_URL = config('CELERY_BROKER_URL', default='') +CELERY_BROKER_URL_FILE = config('CELERY_BROKER_URL_FILE', default=None) +if not CELERY_BROKER_URL and CELERY_BROKER_URL_FILE: + CELERY_BROKER_URL = Path(CELERY_BROKER_URL_FILE).read_text() +CELERY_RESULT_BACKEND = config('CELERY_RESULT_BACKEND', default='rpc://') + + +CELERY_RESULT_EXPIRES = timedelta(seconds=30) +CELERY_TASK_DEFAULT_EXCHANGE = 'tasks' +CELERY_TASK_QUEUES_NO_DECLARE = config('CELERY_TASK_QUEUES_NO_DECLARE', cast=bool, default=False) +from kombu import Queue +CELERY_TASK_QUEUES = [ + Queue('rendering', routing_key='tasks.rendering', no_declare=CELERY_TASK_QUEUES_NO_DECLARE), +] +CELERY_TASK_ROUTES = { + 'reportcreator.render_pdf': { + 'exchange': CELERY_TASK_DEFAULT_EXCHANGE, + 'queue': 'rendering', + 'routing_key': 'tasks.rendering', + }, +} + + +# Celery worker settings +CELERY_SECURE_WORKER = config('CELERY_SECURE_WORKER', cast=bool, default=False) +if CELERY_SECURE_WORKER: + CELERY_WORKER_POOL = 'prefork' + CELERY_WORKER_CONCURRENCY = 1 + CELERY_WORKER_MAX_TASKS_PER_CHILD = 1 + CELERY_WORKER_PREFETCH_MULTIPLIER = 1 + CELERY_BROKER_POOL_LIMIT = 0 + CELERY_TASK_ACKS_LATE = False + CELERY_WORKER_ENABLE_REMOTE_CONTROL = True + + +CELERY_WORKER_HIJACK_ROOT_LOGGER=False +CELERY_WORKER_SEND_TASK_EVENTS = False +CELERY_TASK_TIME_LIMIT = 60 * 5 +CELERY_TASK_SOFT_TIME_LIMIT = 60 * 5 + 10 + +# Execute tasks locally, if no broker is configured +CELERY_TASK_ALWAYS_EAGER = not CELERY_BROKER_URL + + + +# Periodic tasks +PERIODIC_TASKS = [ + { + 'id': 'fetch_notifications', + 'task': 'reportcreator_api.notifications.tasks.fetch_notifications', + 'schedule': timedelta(days=1), + 
}, + { + 'id': 'clear_sessions', + 'task': 'reportcreator_api.utils.tasks.clear_sessions', + 'schedule': timedelta(days=1), + }, + { + 'id': 'cleanup_unreferenced_images_and_files', + 'task': 'reportcreator_api.pentests.tasks.cleanup_unreferenced_images_and_files', + 'schedule': timedelta(days=1), + }, + { + 'id': 'reset_stale_archive_restores', + 'task': 'reportcreator_api.pentests.tasks.reset_stale_archive_restores', + 'schedule': timedelta(days=1), + } +] + + +# MAX_LOCK_TIME should not be less than 1.30min, because some browsers (Chromium) triggers timers only once per minute if the browser tab is inactive +MAX_LOCK_TIME = timedelta(seconds=90) + +SPELLCHECK_URL = config('SPELLCHECK_URL', default=None) +SPELLCHECK_DICTIONARY_PER_USER = config('SPELLCHECK_DICTIONARY_PER_USER', cast=bool, default=False) + +BACKUP_KEY = config('BACKUP_KEY', default=None) + +COMPRESS_IMAGES = config('COMPRESS_IMAGES', cast=bool, default=True) + + +from reportcreator_api.archive.crypto import EncryptionKey +ENCRYPTION_KEYS = EncryptionKey.from_json_list(config('ENCRYPTION_KEYS', default='')) +DEFAULT_ENCRYPTION_KEY_ID = config('DEFAULT_ENCRYPTION_KEY_ID', default=None) +ENCRYPTION_PLAINTEXT_FALLBACK = config('ENCRYPTION_PLAINTEXT_FALLBACK', cast=bool, default=True) + +GUEST_USERS_CAN_IMPORT_PROJECTS = config('GUEST_USERS_CAN_IMPORT_PROJECTS', default=False) +GUEST_USERS_CAN_CREATE_PROJECTS = config('GUEST_USERS_CAN_CREATE_PROJECTS', default=True) +GUEST_USERS_CAN_DELETE_PROJECTS = config('GUEST_USERS_CAN_DELETE_PROJECTS', default=True) +GUEST_USERS_CAN_UPDATE_PROJECT_SETTINGS = config('GUEST_USERS_CAN_UPDATE_PROJECT_SETTINGS', default=True) + +ENABLE_PRIVATE_DESIGNS = config('ENABLE_PRIVATE_DESIGNS', cast=bool, default=False) + +ARCHIVING_THRESHOLD = config('ARCHIVING_THRESHOLD', cast=int, default=2) +assert ARCHIVING_THRESHOLD > 0 + +# Health checks +HEALTH_CHECKS = { + 'cache': 'reportcreator_api.api_utils.healthchecks.check_cache', + 'database': 'reportcreator_api.api_utils.healthchecks.check_database', + 'migrations': 'reportcreator_api.api_utils.healthchecks.check_migrations', +} + +# Notifications +VERSION = config('VERSION', default='dev') +INSTANCE_TAGS = config('INSTANCE_TAGS', default='on-premise').split(';') +NOTIFICATION_IMPORT_URL = config('NOTIFICATION_IMPORT_URL', default='https://cloud.sysreptor.com/api/v1/notifications/') + + +# License +LICENSE = config('LICENSE', default=None) +LICENSE_VALIDATION_KEYS = [ + {'id': 'amber', 'algorithm': 'ed25519', 'key': 'MCowBQYDK2VwAyEAkqCS3lZbrzh+2mKTYymqPHtKBrh8glFxnj9OcoQR9xQ='}, + {'id': 'silver', 'algorithm': 'ed25519', 'key': 'MCowBQYDK2VwAyEAwu/cl0CZSSBFOzFSz/hhUQQjHIKiT4RS3ekPevSKn7w='}, +] +LICENSE_COMMUNITY_MAX_USERS = 3 + + +# Elastic APM +ELASTIC_APM_ENABLED = config('ELASTIC_APM_ENABLED', cast=bool, default=False) +ELASTIC_APM = { + 'ENABLED': ELASTIC_APM_ENABLED, + 'SERVICE_NAME': config('ELASTIC_APM_SERVICE_NAME', default=''), + 'SERVICE_TOKEN': config('ELASTIC_APM_SERVICE_TOKEN', default=''), + 'SERVER_URL': config('ELASTIC_APM_SERVER_URL', default=''), + 'SPAN_COMPRESSION_ENABLED': False, + 'DJANGO_AUTOINSERT_MIDDLEWARE': False, + 'DJANGO_TRANSACTION_NAME_FROM_ROUTE': True, +} +if ELASTIC_APM_ENABLED: + INSTALLED_APPS.append('elasticapm.contrib.django') + MIDDLEWARE.insert(1, 'elasticapm.contrib.django.middleware.TracingMiddleware') + +ELASTIC_APM_RUM_ENABLED = config('ELASTIC_APM_RUM_ENABLED', cast=bool, default=False) +ELASTIC_APM_RUM_CONFIG = { + 'active': ELASTIC_APM_RUM_ENABLED, + 'serviceName': 
config('ELASTIC_APM_RUM_SERVICE_NAME', default=''), + 'serverUrl': config('ELASTIC_APM_RUM_SERVER_URL', default=''), + 'serviceVersion': 'dev', +} +if ELASTIC_APM_RUM_ENABLED: + CSP_CONNECT_SRC.append(ELASTIC_APM_RUM_CONFIG['serverUrl']) + + +if DEBUG: + INSTALLED_APPS += [ + 'debug_toolbar', + ] + MIDDLEWARE += [ + 'debug_toolbar.middleware.DebugToolbarMiddleware', + ] + INTERNAL_IPS = type(str('c'), (), {'__contains__': lambda *a: True})() + + + +logging_handlers = ['console'] + (['elasticapm'] if ELASTIC_APM_ENABLED else []) +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'default': { + 'class': 'logging.Formatter', + 'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s' + }, + }, + 'handlers': { + 'console': { + 'level': 'DEBUG', + 'formatter': 'default', + 'class': 'logging.StreamHandler', + }, + 'elasticapm': { + 'level': 'WARNING', + 'class': 'elasticapm.contrib.django.handlers.LoggingHandler', + }, + }, + 'root': { + 'level': 'INFO', + 'handlers': logging_handlers, + }, + 'loggers': { + 'celery': { + 'level': 'WARNING', + 'handlers': logging_handlers, + 'propagate': False, + }, + 'celery.worker.strategy': { + 'level': 'INFO', + 'handlers': logging_handlers, + 'propagate': False, + }, + 'weasyprint': { + 'level': 'ERROR', + 'handlers': logging_handlers, + 'propagate': False, + }, + 'playwright': { + 'level': 'WARNING', + 'handlers': logging_handlers, + 'propagate': False, + }, + 'pikepdf': { + 'level': 'WARNING', + 'handlers': logging_handlers, + 'propagate': False, + }, + 'fontTools': { + 'level': 'WARNING', + 'handlers': logging_handlers, + 'propagate': False, + }, + } +} \ No newline at end of file diff --git a/api/src/reportcreator_api/conf/settings_test.py b/api/src/reportcreator_api/conf/settings_test.py new file mode 100644 index 0000000..8467dab --- /dev/null +++ b/api/src/reportcreator_api/conf/settings_test.py @@ -0,0 +1,30 @@ +from reportcreator_api.conf.settings import * + + +STORAGES = STORAGES | { + 'uploaded_images': {'BACKEND': 'django.core.files.storage.InMemoryStorage'}, + 'uploaded_assets': {'BACKEND': 'django.core.files.storage.InMemoryStorage'}, + 'uploaded_files': {'BACKEND': 'django.core.files.storage.InMemoryStorage'}, + 'archived_files': {'BACKEND': 'django.core.files.storage.InMemoryStorage'}, +} + + +REST_FRAMEWORK['DEFAULT_THROTTLE_CLASSES'] = [] +REST_FRAMEWORK['TEST_REQUEST_DEFAULT_FORMAT'] = 'json' + + +AUTHLIB_OAUTH_CLIENTS = {} +ELASTIC_APM_ENABLED = False +ELASTIC_APM_RUM_ENABLED = False +CELERY_TASK_ALWAYS_EAGER = True +NOTIFICATION_IMPORT_URL = None + +ENABLE_PRIVATE_DESIGNS = True +ARCHIVING_THRESHOLD = 1 + +BACKUP_KEY = 'dummy-backup-key-used-in-unit-test' + + +# Disable license check +from reportcreator_api.utils import license +license.check_license = lambda: {'type': license.LicenseType.PROFESSIONAL, 'users': 1000} diff --git a/api/src/reportcreator_api/conf/urls.py b/api/src/reportcreator_api/conf/urls.py new file mode 100644 index 0000000..ca5fbd2 --- /dev/null +++ b/api/src/reportcreator_api/conf/urls.py @@ -0,0 +1,88 @@ +from django.conf import settings +from django.conf.urls.static import static +from django.contrib import admin +from django.urls import path, include, re_path +from django.http import HttpResponse +from django.views.generic.base import TemplateView, RedirectView +from rest_framework.routers import DefaultRouter +from rest_framework_nested.routers import NestedSimpleRouter + +from reportcreator_api.api_utils.views import SpellcheckWordView, UtilsViewSet, SpellcheckView, 
HealthcheckView +from reportcreator_api.pentests.views import ArchivedProjectKeyPartViewSet, ArchivedProjectViewSet, FindingTemplateViewSet, PentestFindingViewSet, PentestProjectViewSet, ProjectNotebookPageViewSet, \ + PentestProjectPreviewView, PentestProjectGenerateView, \ + ProjectTypeViewSet, ProjectTypePreviewView, \ + ReportSectionViewSet, UploadedAssetViewSet, UploadedImageViewSet, UploadedProjectFileViewSet, UploadedUserNotebookImageViewSet, UserNotebookPageViewSet, UserPublicKeyViewSet +from reportcreator_api.users.views import PentestUserViewSet, MFAMethodViewSet, AuthViewSet, AuthIdentityViewSet +from reportcreator_api.notifications.views import NotificationViewSet + + +router = DefaultRouter() +router.register('pentestusers', PentestUserViewSet, basename='pentestuser') +router.register('pentestusers/self/notes/images', UploadedUserNotebookImageViewSet, basename='uploadedusernotebookimage') +router.register('pentestusers/self/notes', UserNotebookPageViewSet, basename='usernotebookpage') +router.register('projecttypes', ProjectTypeViewSet, basename='projecttype') +router.register('pentestprojects', PentestProjectViewSet, basename='pentestproject') +router.register('archivedprojects', ArchivedProjectViewSet, basename='archivedproject') +router.register('findingtemplates', FindingTemplateViewSet, basename='findingtemplate') +router.register('utils', UtilsViewSet, basename='utils') +router.register('auth', AuthViewSet, basename='auth') + +user_router = NestedSimpleRouter(router, 'pentestusers', lookup='pentestuser') +user_router.register('mfa', MFAMethodViewSet, basename='mfamethod') +user_router.register('identities', AuthIdentityViewSet, basename='authidentity') +user_router.register('notifications', NotificationViewSet, basename='notification') +user_router.register('publickeys', UserPublicKeyViewSet, basename='userpublickey') + +project_router = NestedSimpleRouter(router, 'pentestprojects', lookup='project') +project_router.register('sections', ReportSectionViewSet, basename='section') +project_router.register('findings', PentestFindingViewSet, basename='finding') +project_router.register('notes', ProjectNotebookPageViewSet, basename='projectnotebookpage') +project_router.register('images', UploadedImageViewSet, basename='uploadedimage') +project_router.register('files', UploadedProjectFileViewSet, basename='uploadedprojectfile') + +projecttype_router = NestedSimpleRouter(router, 'projecttypes', lookup='projecttype') +projecttype_router.register('assets', UploadedAssetViewSet, basename='uploadedasset') + +archivedproject_router = NestedSimpleRouter(router, 'archivedprojects', lookup='archivedproject') +archivedproject_router.register('keyparts', ArchivedProjectKeyPartViewSet, basename='archivedprojectkeypart') + +# Make trailing slash in URL optional to support loading images and assets by filename (see the sketch below) +router.trailing_slash = '/?' +project_router.trailing_slash = '/?' +projecttype_router.trailing_slash = '/?' +archivedproject_router.trailing_slash = '/?' 
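The optional trailing slash configured above can be checked directly against Django's URL resolver: with trailing_slash = '/?' the routers emit detail patterns like r'^pentestusers/(?P<pk>[^/.]+)/?$', so both spellings of a URL hit the same viewset. A minimal editorial sketch, not part of the committed patch, assuming DJANGO_SETTINGS_MODULE points at reportcreator_api.conf.settings:

import django
django.setup()  # requires DJANGO_SETTINGS_MODULE=reportcreator_api.conf.settings

from django.urls import resolve

# Both forms match the same router-generated pattern (no trailing slash required)
with_slash = resolve('/api/v1/pentestusers/self/')
without_slash = resolve('/api/v1/pentestusers/self')
assert with_slash.func.cls is without_slash.func.cls  # both resolve to PentestUserViewSet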
+ + +urlpatterns = [ + path('admin/login/', RedirectView.as_view(url='/users/self/admin/enable/')), + path('admin/', admin.site.urls), + re_path(r'^api/?$', RedirectView.as_view(url='/api/v1/')), + path('api/v1/', include([ + path('', include(router.urls)), + path('', include(user_router.urls)), + path('', include(project_router.urls)), + path('', include(projecttype_router.urls)), + path('', include(archivedproject_router.urls)), + + # Async views + path('utils/spellcheck/', SpellcheckView.as_view(), name='utils-spellcheck'), + path('utils/spellcheck/words/', SpellcheckWordView.as_view(), name='utils-spellcheck-words'), + path('utils/healthcheck/', HealthcheckView.as_view(), name='utils-healthcheck'), + path('pentestprojects/<uuid:pk>/preview/', PentestProjectPreviewView.as_view(), name='pentestproject-preview'), + path('pentestprojects/<uuid:pk>/generate/', PentestProjectGenerateView.as_view(), name='pentestproject-generate'), + path('projecttypes/<uuid:pk>/preview/', ProjectTypePreviewView.as_view(), name='projecttype-preview'), + ])), + + # Static files + path('robots.txt', lambda *args, **kwargs: HttpResponse("User-Agent: *\nDisallow: /\n", content_type="text/plain")), + + # Fallback URL for SPA + re_path(r'^(?!(api|admin)).*/?$', TemplateView.as_view(template_name='index.html')), +] + + +if settings.DEBUG: + urlpatterns = [ + path('__debug__/', include('debug_toolbar.urls')), + ] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + urlpatterns + diff --git a/api/src/reportcreator_api/conf/wsgi.py b/api/src/reportcreator_api/conf/wsgi.py new file mode 100644 index 0000000..51a6f17 --- /dev/null +++ b/api/src/reportcreator_api/conf/wsgi.py @@ -0,0 +1,18 @@ +""" +WSGI config for reportcreator_api project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/ +""" + +import os + +from pathlib import Path +from whitenoise import WhiteNoise +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'reportcreator_api.conf.settings') + +application = get_wsgi_application() diff --git a/api/src/reportcreator_api/management/__init__.py b/api/src/reportcreator_api/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/management/commands/__init__.py b/api/src/reportcreator_api/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/management/commands/cleanupfiles.py b/api/src/reportcreator_api/management/commands/cleanupfiles.py new file mode 100644 index 0000000..d6bf242 --- /dev/null +++ b/api/src/reportcreator_api/management/commands/cleanupfiles.py @@ -0,0 +1,33 @@ +from django.core.management.base import BaseCommand +from django.db import transaction +from reportcreator_api.pentests.models import ArchivedProject, UploadedAsset, UploadedImage, UploadedProjectFile, UploadedUserNotebookImage + + +class Command(BaseCommand): + help = 'Clean up file entries from the DB where the files do not exist on the filesystem.' 
+ + def file_exists(self, f): + try: + with f.open(): + return True + except Exception: + return False + + @transaction.atomic + def handle(self, *args, **options): + UploadedAsset.objects \ + .filter(pk__in=[o.pk for o in UploadedAsset.objects.iterator() if not self.file_exists(o.file)]) \ + .delete() + UploadedImage.objects \ + .filter(pk__in=[o.pk for o in UploadedImage.objects.iterator() if not self.file_exists(o.file)]) \ + .delete() + UploadedUserNotebookImage.objects \ + .filter(pk__in=[o.pk for o in UploadedUserNotebookImage.objects.iterator() if not self.file_exists(o.file)]) \ + .delete() + UploadedProjectFile.objects \ + .filter(pk__in=[o.pk for o in UploadedProjectFile.objects.iterator() if not self.file_exists(o.file)]) \ + .delete() + ArchivedProject.objects \ + .filter(pk__in=[o.pk for o in ArchivedProject.objects.iterator() if not self.file_exists(o.file)]) \ + .delete() + diff --git a/api/src/reportcreator_api/management/commands/encryptdata.py b/api/src/reportcreator_api/management/commands/encryptdata.py new file mode 100644 index 0000000..af7b1f1 --- /dev/null +++ b/api/src/reportcreator_api/management/commands/encryptdata.py @@ -0,0 +1,66 @@ +import itertools +import warnings +import copy +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError +from django.test import override_settings + +from reportcreator_api.pentests.models import PentestFinding, PentestProject, ProjectType, UploadedAsset, UploadedImage, \ + UploadedProjectFile, UploadedUserNotebookImage, NotebookPage, UserPublicKey, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart +from reportcreator_api.users.models import MFAMethod, PentestUser, Session + + +class Command(BaseCommand): + help = 'Encrypt all data using the current encryption key. If data was encrypted with a different key, it is re-encrypted with the current key.' + + def add_arguments(self, parser) -> None: + parser.add_argument('--decrypt', action='store_true', help='Decrypt all data') + + def encrypt_data(self): + # Encrypt DB fields + PentestProject.objects.bulk_update(PentestProject.objects.all().iterator(), ['custom_fields']) + PentestFinding.objects.bulk_update(PentestFinding.objects.all().iterator(), ['custom_fields', 'template_id']) + ProjectType.objects.bulk_update(ProjectType.objects.all().iterator(), ['report_template', 'report_styles', 'report_preview_data']) + NotebookPage.objects.bulk_update(NotebookPage.objects.all(), ['title', 'text']) + PentestUser.objects.bulk_update(PentestUser.objects.all(), ['password']) + Session.objects.bulk_update(Session.objects.all(), ['session_key', 'session_data']) + MFAMethod.objects.bulk_update(MFAMethod.objects.all(), ['data']) + UserPublicKey.objects.bulk_update(UserPublicKey.objects.all(), ['public_key']) + ArchivedProjectKeyPart.objects.bulk_update(ArchivedProjectKeyPart.objects.all(), ['key_part']) + ArchivedProjectPublicKeyEncryptedKeyPart.objects.bulk_update(ArchivedProjectPublicKeyEncryptedKeyPart.objects.all(), ['encrypted_data']) + + # Encrypt files + old_files = [] + for f in itertools.chain( + UploadedImage.objects.all(), + UploadedAsset.objects.all(), + UploadedUserNotebookImage.objects.all(), + UploadedProjectFile.objects.all() + ): + # Copy file content. 
Encryption is performed during content copy to new file by the storage + old_file = copy.copy(f.file) + f.file.save(name=f.name, content=old_file, save=False) + f.save() + old_files.append(old_file) + for f in old_files: + f.storage.delete(f.name) + + def handle(self, decrypt, *args, **options): + if not settings.ENCRYPTION_KEYS: + raise CommandError('No ENCRYPTION_KEYS configured') + + if decrypt: + if settings.DEFAULT_ENCRYPTION_KEY_ID: + warnings.warn('A DEFAULT_ENCRYPTION_KEY_ID is configured. New and updated data will be encrypted while storing it. Set DEFAULT_ENCRYPTION_KEY_ID=None to permanently disable encryption.') + + with override_settings(DEFAULT_ENCRYPTION_KEY_ID=None, ENCRYPTION_PLAINTEXT_FALLBACK=True): + self.encrypt_data() + else: + if not settings.DEFAULT_ENCRYPTION_KEY_ID: + raise CommandError('No DEFAULT_ENCRYPTION_KEY_ID configured') + if not settings.ENCRYPTION_KEYS.get(settings.DEFAULT_ENCRYPTION_KEY_ID): + raise CommandError('Invalid DEFAULT_ENCRYPTION_KEY_ID') + with override_settings(ENCRYPTION_PLAINTEXT_FALLBACK=True): + self.encrypt_data() + + diff --git a/api/src/reportcreator_api/management/commands/importdemodata.py b/api/src/reportcreator_api/management/commands/importdemodata.py new file mode 100644 index 0000000..4c984e7 --- /dev/null +++ b/api/src/reportcreator_api/management/commands/importdemodata.py @@ -0,0 +1,50 @@ + + +import argparse +import shutil +import tempfile +import uuid +from django.conf import settings +from django.core.management.base import BaseCommand, CommandError +from reportcreator_api.pentests.models.project import PentestProject + +from reportcreator_api.users.models import PentestUser +from reportcreator_api.archive.import_export import import_project_types, import_templates, import_projects + + +class Command(BaseCommand): + help = 'Import archives containing demo data' + + def add_arguments(self, parser): + parser.add_argument('file', nargs='?', type=argparse.FileType('rb'), default='-') + parser.add_argument('--type', choices=['design', 'template', 'project']) + parser.add_argument('--add-member', action='append', help='Add user as member to imported projects') + + def get_user(self, u): + try: + return PentestUser.objects.get(id=uuid.UUID(u)) + except Exception: + pass + try: + return PentestUser.objects.get(username=u) + except PentestUser.DoesNotExist: + raise CommandError(f'User "{u}" not found') + + def handle(self, file, type, add_member, *args, **options): + if type == 'project': + add_member = list(map(self.get_user, add_member)) + + import_func = { + 'design': import_project_types, + 'template': import_templates, + 'project': import_projects, + }[type] + + with tempfile.SpooledTemporaryFile(max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode='w+b') as f: + shutil.copyfileobj(file, f) + f.seek(0) + imported = import_func(f) + if type == 'project': + for u in add_member: + PentestProject.objects.add_member(u, imported) + diff --git a/api/src/reportcreator_api/notifications/__init__.py b/api/src/reportcreator_api/notifications/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/notifications/admin.py b/api/src/reportcreator_api/notifications/admin.py new file mode 100644 index 0000000..3301430 --- /dev/null +++ b/api/src/reportcreator_api/notifications/admin.py @@ -0,0 +1,15 @@ +from django.contrib import admin + +from reportcreator_api.utils.admin import BaseAdmin +from reportcreator_api.notifications.models import NotificationSpec, UserNotification + + 
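The three management commands introduced above (cleanupfiles, encryptdata, importdemodata) are regular Django commands, so they can be run through manage.py or invoked programmatically. A hedged usage sketch, not part of the committed patch; the archive name and username are made-up example values:

from django.core.management import call_command

call_command('cleanupfiles')               # delete DB records whose files are missing from storage
call_command('encryptdata')                # (re-)encrypt data with the configured DEFAULT_ENCRYPTION_KEY_ID
call_command('encryptdata', decrypt=True)  # equivalent to passing --decrypt on the command line
# 'demo-project.tar.gz' and 'alice' are hypothetical example values
call_command('importdemodata', 'demo-project.tar.gz', type='project', add_member=['alice'])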
+@admin.register(NotificationSpec) +class NotificationSpecAdmin(BaseAdmin): + pass + + +@admin.register(UserNotification) +class UserNotificationAdmin(BaseAdmin): + pass + diff --git a/api/src/reportcreator_api/notifications/apps.py b/api/src/reportcreator_api/notifications/apps.py new file mode 100644 index 0000000..8e0381d --- /dev/null +++ b/api/src/reportcreator_api/notifications/apps.py @@ -0,0 +1,10 @@ +from django.apps import AppConfig + + +class NotificationsConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'reportcreator_api.notifications' + + def ready(self) -> None: + from . import signals # noqa + from . import tasks # noqa diff --git a/api/src/reportcreator_api/notifications/migrations/0001_initial.py b/api/src/reportcreator_api/notifications/migrations/0001_initial.py new file mode 100644 index 0000000..1c5f9cd --- /dev/null +++ b/api/src/reportcreator_api/notifications/migrations/0001_initial.py @@ -0,0 +1,55 @@ +# Generated by Django 4.1.3 on 2023-01-24 13:11 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='NotificationSpec', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('active_until', models.DateField(blank=True, db_index=True, null=True)), + ('instance_conditions', models.JSONField(blank=True, default=dict)), + ('user_conditions', models.JSONField(blank=True, default=dict)), + ('visible_for_days', models.IntegerField(blank=True, null=True)), + ('title', models.CharField(max_length=255)), + ('text', models.TextField()), + ('link_url', models.TextField(blank=True, null=True)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.CreateModel( + name='UserNotification', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('visible_until', models.DateTimeField(blank=True, null=True)), + ('read', models.BooleanField(db_index=True, default=False)), + ('notification', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='notifications.notificationspec')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'unique_together': {('user', 'notification')}, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + ] diff --git a/api/src/reportcreator_api/notifications/migrations/__init__.py b/api/src/reportcreator_api/notifications/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/notifications/models.py b/api/src/reportcreator_api/notifications/models.py new file mode 100644 index 0000000..a397801 --- /dev/null +++ b/api/src/reportcreator_api/notifications/models.py @@ -0,0 +1,38 @@ +from django.db import models + +from 
reportcreator_api.notifications import querysets +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.users.models import PentestUser + + +class NotificationSpec(BaseModel): + """ + Specification for a notification that gets assigned to users. + """ + active_until = models.DateField(null=True, blank=True, db_index=True) + instance_conditions = models.JSONField(default=dict, blank=True) + user_conditions = models.JSONField(default=dict, blank=True) + visible_for_days = models.IntegerField(null=True, blank=True) + + title = models.CharField(max_length=255) + text = models.TextField() + link_url = models.TextField(null=True, blank=True) + + objects = querysets.NotificationSpecManager() + + +class UserNotification(BaseModel): + """ + Notification assigned to a specific user. Can marked as read. + """ + user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, related_name='notifications') + notification = models.ForeignKey(to=NotificationSpec, on_delete=models.CASCADE) + + visible_until = models.DateTimeField(null=True, blank=True, ) + read = models.BooleanField(default=False, db_index=True) + + objects = models.Manager.from_queryset(querysets.UserNotificationQuerySet)() + + class Meta: + unique_together = [('user', 'notification')] + diff --git a/api/src/reportcreator_api/notifications/querysets.py b/api/src/reportcreator_api/notifications/querysets.py new file mode 100644 index 0000000..9f2590e --- /dev/null +++ b/api/src/reportcreator_api/notifications/querysets.py @@ -0,0 +1,128 @@ +from packaging import version +from datetime import timedelta +from django.conf import settings +from django.db import models +from django.utils import timezone +from django.db.models import signals + +from reportcreator_api.utils import license + + +class UserNotificationQuerySet(models.QuerySet): + def only_permitted(self, user): + return self.filter(user=user) + + def only_visible(self): + return self \ + .filter(models.Q(notification__active_until__isnull=True) | models.Q(notification__active_until__gt=timezone.now())) \ + .filter(models.Q(visible_until__isnull=True) | models.Q(visible_until__gt=timezone.now())) + + +class NotificationSpecQuerySet(models.QuerySet): + def only_active(self): + return self.filter(models.Q(active_until__isnull=True) | models.Q(active_until__gte=timezone.now())) + + +class NotificationSpecManager(models.Manager.from_queryset(NotificationSpecQuerySet)): + def parse_version(self, version_str): + try: + return version.Version(version_str) + except (version.InvalidVersion, TypeError): + return None + + def check_version(self, version_condition): + current_version = self.parse_version(settings.VERSION) + if not current_version: + if settings.VERSION and version_condition and (version_condition == settings.VERSION or version_condition == f'=={settings.VERSION}'): + return True + return False + + if version_condition.startswith('=='): + return current_version == self.parse_version(version_condition[2:]) + elif version_condition.startswith('>='): + required_version = self.parse_version(version_condition[2:]) + return required_version and current_version >= required_version + elif version_condition.startswith('<='): + required_version = self.parse_version(version_condition[2:]) + return required_version and current_version <= required_version + elif version_condition.startswith('>'): + required_version = self.parse_version(version_condition[1:]) + return required_version and current_version > required_version + elif version_condition.startswith('<'): + 
required_version = self.parse_version(version_condition[1:]) + return required_version and current_version < required_version + else: + return current_version == self.parse_version(version_condition) + + def check_instance_conditions(self, notification): + current_instance_tags = list(settings.INSTANCE_TAGS) + if license.is_professional(): + current_instance_tags.append('license:professional') + elif not license.is_professional() and not license.check_license()['error']: + current_instance_tags.append('license:community') + if (instance_tags := set(notification.instance_conditions.get('any_tag', []))) and not instance_tags.intersection(current_instance_tags): + return False + if (version_condition := notification.instance_conditions.get('version')) and not self.check_version(version_condition): + return False + return True + + def users_for_notification(self, notification): + from reportcreator_api.users.models import PentestUser + + if notification.active_until and notification.active_until < timezone.now().date(): + return PentestUser.objects.none() + + # Instance conditions + if not self.check_instance_conditions(notification): + return PentestUser.objects.none() + + # User conditions + users = PentestUser.objects.all() + for role in ['is_superuser', 'is_designer', 'is_template_editor', 'is_user_manager']: + if role in notification.user_conditions and isinstance(notification.user_conditions[role], bool): + users = users.filter(**{role: notification.user_conditions[role]}) + + return users + + def notifications_for_user(self, user): + from reportcreator_api.notifications.models import NotificationSpec + + notifications = NotificationSpec.objects \ + .only_active() \ + .filter(models.Q(user_conditions__is_superuser__isnull=True) | models.Q(user_conditions__is_superuser=user.is_superuser)) \ + .filter(models.Q(user_conditions__is_designer__isnull=True) | models.Q(user_conditions__is_designer=user.is_designer)) \ + .filter(models.Q(user_conditions__is_template_editor__isnull=True) | models.Q(user_conditions__is_template_editor=user.is_template_editor)) \ + .filter(models.Q(user_conditions__is_user_manager__isnull=True) | models.Q(user_conditions__is_user_manager=user.is_user_manager)) + notifications = list(filter(self.check_instance_conditions, notifications)) + return notifications + + def assign_to_users(self, notification): + from reportcreator_api.notifications.models import UserNotification + users = self.users_for_notification(notification) \ + .exclude(notifications__notification=notification) + + user_notifications = [] + for u in users: + visible_until = None + if notification.visible_for_days: + visible_until = timezone.now() + timedelta(days=notification.visible_for_days) + user_notifications.append(UserNotification(user=u, notification=notification, visible_until=visible_until)) + UserNotification.objects.bulk_create(user_notifications) + + def assign_to_notifications(self, user): + from reportcreator_api.notifications.models import UserNotification + notifications = self.notifications_for_user(user) + + user_notifications = [] + for n in notifications: + visible_until = None + if n.visible_for_days: + visible_until = timezone.now() + timedelta(days=n.visible_for_days) + user_notifications.append(UserNotification(user=user, notification=n, visible_until=visible_until)) + UserNotification.objects.bulk_create(user_notifications) + + def bulk_create(self, *args, **kwargs): + objs = super().bulk_create(*args, **kwargs) + for o in objs: + signals.post_save.send(sender=o.__class__, 
instance=o, created=True, raw=False, update_fields=None) + return objs diff --git a/api/src/reportcreator_api/notifications/serializers.py b/api/src/reportcreator_api/notifications/serializers.py new file mode 100644 index 0000000..aafac5f --- /dev/null +++ b/api/src/reportcreator_api/notifications/serializers.py @@ -0,0 +1,57 @@ +from datetime import timedelta +from django.utils import timezone +from rest_framework import serializers + +from reportcreator_api.notifications.models import UserNotification, NotificationSpec + + +class NotificationSpecContentSerializer(serializers.ModelSerializer): + class Meta: + model = NotificationSpec + fields = ['title', 'text', 'link_url'] + + +class UserNotificationSerializer(serializers.ModelSerializer): + content = NotificationSpecContentSerializer(source='notification', read_only=True) + + class Meta: + model = UserNotification + fields = ['id', 'created', 'updated', 'read', 'content'] + + +class InstanceConditionsSerializer(serializers.Serializer): + version = serializers.RegexField(r'^(==|>=|<=|>|<)?[0-9a-zA-Z.]+$', required=False) + any_tags = serializers.ListField(child=serializers.CharField(), required=False) + + +class UserConditionsSerializer(serializers.Serializer): + is_superuser = serializers.BooleanField(required=False) + is_user_manager = serializers.BooleanField(required=False) + is_designer = serializers.BooleanField(required=False) + is_template_editor = serializers.BooleanField(required=False) + + +class NotificationSpecListSerializer(serializers.ListSerializer): + def create(self, validated_data): + notifications = [NotificationSpec(**n) for n in validated_data] + # Set deleted notifications as inactive + NotificationSpec.objects \ + .only_active() \ + .exclude(id__in=[n.id for n in notifications]) \ + .update(active_until=(timezone.now() - timedelta(days=1)).date()) + # Create new notifications + existing_notification_ids = set(NotificationSpec.objects.filter(id__in=[n.id for n in notifications]).values_list('id', flat=True)) + new_notifications = list(filter(lambda n: n.id not in existing_notification_ids and NotificationSpec.objects.check_instance_conditions(n), notifications)) + return NotificationSpec.objects.bulk_create(new_notifications) + + +class NotificationSpecSerializer(serializers.ModelSerializer): + instance_conditions = InstanceConditionsSerializer(required=False) + user_conditions = UserConditionsSerializer(required=False) + + class Meta: + model = NotificationSpec + fields = ['id', 'active_until', 'visible_for_days', 'instance_conditions', 'user_conditions', 'title', 'text', 'link_url'] + extra_kwargs = {'id': {'read_only': False}} + list_serializer_class = NotificationSpecListSerializer + diff --git a/api/src/reportcreator_api/notifications/signals.py b/api/src/reportcreator_api/notifications/signals.py new file mode 100644 index 0000000..f247f07 --- /dev/null +++ b/api/src/reportcreator_api/notifications/signals.py @@ -0,0 +1,25 @@ +from django.db.models import signals +from django.dispatch import receiver + +from reportcreator_api.notifications.models import NotificationSpec +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.models import disable_for_loaddata + + +@receiver(signals.post_save, sender=NotificationSpec) +@disable_for_loaddata +def notification_created(sender, instance, created, *args, **kwargs): + if not created: + return + + NotificationSpec.objects.assign_to_users(instance) + + +@receiver(signals.post_save, sender=PentestUser) +@disable_for_loaddata +def 
user_created(sender, instance, created, *args, **kwargs): + if not created: + return + + NotificationSpec.objects.assign_to_notifications(instance) + diff --git a/api/src/reportcreator_api/notifications/tasks.py b/api/src/reportcreator_api/notifications/tasks.py new file mode 100644 index 0000000..b78f46a --- /dev/null +++ b/api/src/reportcreator_api/notifications/tasks.py @@ -0,0 +1,23 @@ +import httpx +from asgiref.sync import sync_to_async +from django.conf import settings + +from reportcreator_api.notifications.serializers import NotificationSpecSerializer + + +async def fetch_notifications_request(): + async with httpx.AsyncClient(timeout=10) as client: + res = await client.get(settings.NOTIFICATION_IMPORT_URL) + res.raise_for_status() + return res.json() + + +async def fetch_notifications(task_info): + if not settings.NOTIFICATION_IMPORT_URL: + return + + data = await fetch_notifications_request() + serializer = NotificationSpecSerializer(data=data, many=True) + serializer.is_valid(raise_exception=True) + await sync_to_async(serializer.save)() + diff --git a/api/src/reportcreator_api/notifications/views.py b/api/src/reportcreator_api/notifications/views.py new file mode 100644 index 0000000..63e40af --- /dev/null +++ b/api/src/reportcreator_api/notifications/views.py @@ -0,0 +1,21 @@ +from rest_framework import viewsets, mixins, permissions +from rest_framework.settings import api_settings + +from reportcreator_api.notifications.models import UserNotification +from reportcreator_api.notifications.serializers import UserNotificationSerializer + + +class NotificationPermissions(permissions.BasePermission): + def has_permission(self, request, view): + return view.kwargs.get('pentestuser_pk') == 'self' + + +class NotificationViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): + serializer_class = UserNotificationSerializer + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [NotificationPermissions] + + def get_queryset(self): + return UserNotification.objects \ + .only_permitted(self.request.user) \ + .only_visible() \ + .select_related('notification') diff --git a/api/src/reportcreator_api/pentests/__init__.py b/api/src/reportcreator_api/pentests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/pentests/admin.py b/api/src/reportcreator_api/pentests/admin.py new file mode 100644 index 0000000..9a52547 --- /dev/null +++ b/api/src/reportcreator_api/pentests/admin.py @@ -0,0 +1,131 @@ +from django.contrib import admin + +from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectType, ReportSection, \ + UploadedImage, UploadedAsset, UploadedProjectFile, UploadedUserNotebookImage, ProjectMemberInfo, ProjectMemberRole, \ + UserPublicKey, ArchivedProject, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart +from reportcreator_api.utils.admin import BaseAdmin, admin_change_url, admin_changelist_url + + +@admin.register(ProjectType) +class ProjectTypeAdmin(BaseAdmin): + def link_projects(self, obj): + return admin_changelist_url('Projects using this ProjectType', 'pentests', 'pentestproject', {'project_type_id': obj.id}) + + def link_uploaded_assets(self, obj): + return admin_change_url('Uploaded assets', 'pentests', 'uploadedasset', {'projecttype_id': obj.id}) + + +@admin.register(ProjectMemberRole) +class ProjectMemberRoleAdmin(BaseAdmin): + list_display = ['role', 'default'] + + +class 
ProjectMemberInfoInlineAdmin(admin.StackedInline): + model = ProjectMemberInfo + + +@admin.register(PentestProject) +class PentestProjectAdmin(BaseAdmin): + inlines = [ProjectMemberInfoInlineAdmin] + + def link_findings(self, obj): + return admin_changelist_url('Findings of this project', 'pentests', 'pentestfinding', {'project_id': obj.id}) + + def link_project_type(self, obj): + return admin_change_url(obj.project_type.name, 'pentests', 'projecttype', obj.project_type.id) + + def link_uploaded_images(self, obj): + return admin_changelist_url('Uploaded images', 'pentests', 'uploadedimage', {'linked_object_id': obj.id}) + + def link_notes(self, obj): + return admin_changelist_url('Notebook pages', 'pentests', 'notebookpage', {'project_id': obj.id}) + + def link_uploaded_files(self, obj): + return admin_changelist_url('Uploaded files', 'pentests', 'uploadedprojectfile', {'linked_object_id': obj.id}) + + +@admin.register(PentestFinding) +class PentestFindingAdmin(BaseAdmin): + def link_project(self, obj): + return admin_change_url(obj.project.name, 'pentests', 'pentestproject', obj.project.id) + + +@admin.register(ReportSection) +class ReportSectionAdmin(BaseAdmin): + def link_project(self, obj): + return admin_change_url(obj.project.name, 'pentests', 'pentestproject', obj.project.id) + + +@admin.register(NotebookPage) +class NotebookPageAdmin(BaseAdmin): + def link_project(self, obj): + return admin_change_url(obj.project.name, 'pentests', 'pentestproject', obj.project.id) + + def link_user(self, obj): + return admin_change_url(obj.user.name, 'users', 'pentestuser', obj.user.id) + + +@admin.register(FindingTemplate) +class FindingTemplateAdmin(BaseAdmin): + pass + + +@admin.register(UploadedImage) +class UploadedImageAdmin(BaseAdmin): + def link_project(self, obj): + return admin_change_url(obj.linked_object.name, 'pentests', 'pentestproject', obj.linked_object.id) + + +@admin.register(UploadedAsset) +class UploadedAssetAdmin(BaseAdmin): + def link_project_type(self, obj): + return admin_change_url(obj.linked_object.name, 'pentests', 'projecttype', obj.linked_object.id) + + +@admin.register(UploadedUserNotebookImage) +class UploadedUserNotebookImageAdmin(BaseAdmin): + def link_user(self, obj): + return admin_change_url(obj.linked_object.name, 'users', 'pentestuser', obj.linked_object.id) + + +@admin.register(UploadedProjectFile) +class UploadedProjectFileAdmin(BaseAdmin): + def link_project(self, obj): + return admin_change_url(obj.linked_object.name, 'pentests', 'pentestproject', obj.linked_object.id) + + +@admin.register(UserPublicKey) +class UserPublicKeyAdmin(BaseAdmin): + def link_user(self, obj): + return admin_change_url(obj.user.username, 'users', 'pentestuser', obj.user.id) + + def link_encrypted_key_parts(self, obj): + return admin_changelist_url('ArchivedProjectPublicKeyEncryptedKeyPart encrypted with this public key', 'pentests', 'archivedprojectpublickeyencryptedkeypart', {'public_key_id': obj.id}) + + +@admin.register(ArchivedProject) +class ArchivedProjectAdmin(BaseAdmin): + def link_key_parts(self, obj): + return admin_changelist_url('key parts', 'pentests', 'archivedprojectkeypart', {'archived_project_id': obj.id}) + + +@admin.register(ArchivedProjectKeyPart) +class ArchivedProjectKeyPartAdmin(BaseAdmin): + def link_user(self, obj): + return admin_change_url(obj.user.username, 'users', 'pentestuser', obj.user.id) + + def link_archive(self, obj): + return admin_change_url(obj.archived_project.name, 'pentests', 'archivedproject', obj.archived_project.id) + + def 
link_encrypted_key_parts(self, obj): + return admin_changelist_url('Encrypted key part data', 'pentests', 'archivedprojectpublickeyencryptedkeypart', {'key_part_id': obj.id}) + + +@admin.register(ArchivedProjectPublicKeyEncryptedKeyPart) +class ArchivedProjectPublicKeyEncryptedKeyPartAdmin(BaseAdmin): + def link_key_part(self, obj): + return admin_change_url('Archive key part', 'pentests', 'archivedprojectkeypart', obj.key_part.id) + + def link_public_key(self, obj): + return admin_change_url(obj.public_key.name, 'pentests', 'userpublickey', obj.public_key.id) + diff --git a/api/src/reportcreator_api/pentests/apps.py b/api/src/reportcreator_api/pentests/apps.py new file mode 100644 index 0000000..73434bf --- /dev/null +++ b/api/src/reportcreator_api/pentests/apps.py @@ -0,0 +1,10 @@ +from django.apps import AppConfig + + +class PentestsConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'reportcreator_api.pentests' + + def ready(self) -> None: + from . import signals # noqa + diff --git a/api/src/reportcreator_api/pentests/checks.py b/api/src/reportcreator_api/pentests/checks.py new file mode 100644 index 0000000..5dfae64 --- /dev/null +++ b/api/src/reportcreator_api/pentests/checks.py @@ -0,0 +1,149 @@ +import itertools +from typing import Any, Iterable, Union +from reportcreator_api.pentests import cvss +from reportcreator_api.pentests.customfields.types import FieldDataType +from reportcreator_api.pentests.customfields.utils import iterate_fields +from reportcreator_api.pentests.models import PentestFinding, PentestProject, ProjectType, ReportSection, ReviewStatus +from reportcreator_api.utils.error_messages import ErrorMessage, MessageLevel, MessageLocationInfo, MessageLocationType + +from reportcreator_api.utils.utils import find_all_indices + + +class ReportCheck: + def location_info(self, obj, path=None): + if isinstance(obj, PentestProject): + return MessageLocationInfo( + type=MessageLocationType.PROJECT, + id=obj.id, + name=obj.name, + ).for_path(path) + elif isinstance(obj, ReportSection): + return MessageLocationInfo( + type=MessageLocationType.SECTION, + id=obj.section_id, + name=obj.section_label, + ).for_path(path) + elif isinstance(obj, PentestFinding): + return MessageLocationInfo( + type=MessageLocationType.FINDING, + id=obj.finding_id, + name=obj.data.get('title'), + ).for_path(path) + elif isinstance(obj, ProjectType): + return MessageLocationInfo( + type=MessageLocationType.DESIGN, + id=obj.id, + name=obj.name, + ).for_path(path) + else: + raise ValueError('Unsupported MessageLocationInfo') + + def check(self, project: PentestProject) -> Iterable[ErrorMessage]: + return itertools.chain( + self.check_project(project), + *map(self.check_section, project.sections.all()), + *map(self.check_finding, project.findings.all()), + ) + + def check_project(self, project: PentestProject) -> Iterable[ErrorMessage]: + return [] + + def check_section(self, section: ReportSection) -> Iterable[ErrorMessage]: + return [] + + def check_finding(self, finding: PentestFinding) -> Iterable[ErrorMessage]: + return [] + + +class TodoCheck(ReportCheck): + def check_todos_in_field(self, data: dict, definition: dict, location: MessageLocationInfo) -> Iterable[ErrorMessage]: + for p, v, d in iterate_fields(value=data, definition=definition): + if isinstance(v, str): + snippets = [] + for idx in itertools.chain(*map(lambda s: find_all_indices(v, s), ['TODO', 'todo', 'ToDo', 'TO-DO', 'To-Do', 'To-do', 'to-do'])): + snippet = v[idx:].splitlines()[0] + if len(snippet) > 
100: + snippet = snippet[:100] + '...' + snippets.append(snippet) + if snippets: + yield ErrorMessage( + level=MessageLevel.WARNING, + location=location.for_path(p), + message='Unresolved TODO', + details='\n'.join(snippets)) + + def check_section(self, section) -> Iterable[ErrorMessage]: + return self.check_todos_in_field(section.data, section.field_definition, self.location_info(section)) + + def check_finding(self, finding) -> Iterable[ErrorMessage]: + return self.check_todos_in_field(finding.data, finding.field_definition, self.location_info(finding)) + + +class EmptyFieldsCheck(ReportCheck): + def check_field(self, data: dict, definition: dict, location: MessageLocationInfo) -> Iterable[ErrorMessage]: + for p, v, d in iterate_fields(value=data, definition=definition): + if getattr(d, 'required', False) and (v is None or v == '' or v == []): + yield ErrorMessage( + level=MessageLevel.WARNING, + location=location.for_path(p), + message='Empty field', + ) + + def check_section(self, section) -> Iterable[ErrorMessage]: + return self.check_field(section.data, section.field_definition, self.location_info(section)) + + def check_finding(self, finding) -> Iterable[ErrorMessage]: + return self.check_field(finding.data, finding.field_definition, self.location_info(finding)) + + +class StatusCheck(ReportCheck): + def check_status(self, obj: Union[ReportSection, PentestFinding]): + if obj.status != ReviewStatus.FINISHED: + yield ErrorMessage( + level=MessageLevel.WARNING, + location=self.location_info(obj=obj), + message=f'Status is not "{ReviewStatus.FINISHED}"', + details=f'Status is "{obj.status}", not status "{ReviewStatus.FINISHED}"', + ) + + def check(self, project: PentestProject) -> Iterable[ErrorMessage]: + # If all findings and sections have status "in-progress", deactivate this check. + # We assume that the users of the project do not use the review feature and statuses. + # This removed unnecessary (and ignored) warnings if no statuses are used. + if any(map(lambda s: s.status != ReviewStatus.IN_PROGRESS, project.sections.all())) or \ + any(map(lambda f: f.status != ReviewStatus.IN_PROGRESS, project.findings.all())): + return super().check(project) + else: + return [] + + def check_section(self, section: ReportSection) -> Iterable[ErrorMessage]: + return self.check_status(section) + + def check_finding(self, finding: PentestFinding) -> Iterable[ErrorMessage]: + return self.check_status(finding) + + +class CvssFieldCheck(ReportCheck): + def check_finding(self, finding) -> Iterable[ErrorMessage]: + for p, v, d in iterate_fields(value=finding.data, definition=finding.field_definition): + if d.type == FieldDataType.CVSS and not cvss.is_cvss(v) and v != 'n/a' and v not in [None, '', 'n/a']: + yield ErrorMessage( + level=MessageLevel.WARNING, + location=self.location_info(obj=finding, path=p), + message='Invalid CVSS vector', + details=f'"{v}" is not a valid CVSS vector. Enter "n/a" when no CVSS vector is applicable.' 
+ ) + + +def run_checks(project) -> Iterable[ErrorMessage]: + def perform_check(checker): + try: + return checker.check(project) + except Exception as ex: + return [ErrorMessage( + level=MessageLevel.ERROR, + location=MessageLocationInfo(type=MessageLocationType.OTHER), + message='Error while checking data', + details=str(ex), + )] + return list(itertools.chain(*map(perform_check, [TodoCheck(), EmptyFieldsCheck(), CvssFieldCheck(), StatusCheck()]))) diff --git a/api/src/reportcreator_api/pentests/customfields/__init__.py b/api/src/reportcreator_api/pentests/customfields/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/pentests/customfields/fielddefinition.schema.json b/api/src/reportcreator_api/pentests/customfields/fielddefinition.schema.json new file mode 100644 index 0000000..d539d87 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/fielddefinition.schema.json @@ -0,0 +1,146 @@ +{ + "$id": "https://syslifters.com/reportcreator/fielddefinition.schem.json", + "$schema": "https://json-schema.org/draft/2019-09/schema", + "title": "Field Definition", + "$defs": { + "field_object": { + "type": "object", + "patternProperties": { + "^[a-zA-Z_][a-zA-Z0-9_]+$": { + "$ref": "#/$defs/field_value", + "required": ["type", "label"] + } + }, + "additionalProperties": false + }, + "field_value": { + "type": "object", + "required": ["type"], + "properties": { + "label": { + "type": "string" + }, + "origin": { + "type": "string", + "enum": ["core", "predefined", "custom"] + } + }, + "anyOf": [ + { + "properties": { + "type": { + "enum": ["string", "markdown", "cvss"] + }, + "default": { + "type": ["string", "null"] + } + } + }, + { + "properties": { + "type": { + "const": "date" + }, + "default": { + "type": ["string", "null"], + "validate": "date" + } + } + }, + { + "properties": { + "type": { + "const": "number" + }, + "default": { + "type": ["number", "null"] + } + } + }, + { + "properties": { + "type": { + "const": "boolean" + }, + "default": { + "type": ["boolean", "null"] + } + } + }, + { + "properties": { + "type": { + "const": "user" + } + } + }, + { + "required": ["choices"], + "properties": { + "type": { + "const": "enum" + }, + "choices": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "required": ["value", "label"], + "properties": { + "value": { + "type": "string", + "pattern": "^[a-zA-Z_][0-9a-zA-Z_-]+$" + }, + "label": { + "type": "string" + } + } + } + }, + "default": { + "type": ["string", "null"] + } + } + }, + { + "required": ["suggestions"], + "properties": { + "type": { + "const": "combobox" + }, + "suggestions": { + "type": "array", + "minItems": 0, + "items": { + "type": "string" + } + } + } + }, + { + "required": ["properties"], + "properties": { + "type": { + "const": "object" + }, + "properties": { + "$ref": "#/$defs/field_object" + } + } + }, + { + "required": ["items"], + "properties": { + "type": { + "const": "list" + }, + "items": { + "$ref": "#/$defs/field_value" + } + } + } + ] + } + }, + "$ref": "#/$defs/field_object" +} diff --git a/api/src/reportcreator_api/pentests/customfields/mixins.py b/api/src/reportcreator_api/pentests/customfields/mixins.py new file mode 100644 index 0000000..c4653a6 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/mixins.py @@ -0,0 +1,69 @@ +from django.db import models +from django.core.serializers.json import DjangoJSONEncoder +from reportcreator_api.archive.crypto.fields import EncryptedField +from 
reportcreator_api.pentests.customfields.types import FieldDefinition + +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure +from reportcreator_api.pentests.customfields.validators import FieldValuesValidator +from reportcreator_api.utils.utils import copy_keys, omit_keys, merge + + +class CustomFieldsMixin(models.Model): + custom_fields = models.JSONField(encoder=DjangoJSONEncoder, default=dict) + + class Meta: + abstract = True + + @property + def field_definition(self) -> dict[str, FieldDefinition]: + return {} + + @property + def core_field_names(self) -> list[str]: + return [] + + @property + def data(self) -> dict: + """ + Return a dict of all field values. + Sets default values, if a field is not defined. + Does not include data of undefined fields not present in the definition. + """ + return self.get_data() + + @property + def data_all(self) -> dict: + return self.get_data(include_undefined=True) + + def get_data(self, handle_undefined=HandleUndefinedFieldsOptions.FILL_NONE, include_undefined=False) -> dict: + # Build dict of all current values + # Merge core fields stored directly on the model instance and custom_fields stored as dict + out = self.custom_fields.copy() + for k in self.core_field_names: + out[k] = getattr(self, k) + + # recursively check for undefined fields and set default value + out = ensure_defined_structure(value=out, definition=self.field_definition, handle_undefined=handle_undefined, include_undefined=include_undefined) + + return out + + + def update_data(self, value): + # Merge with previous custom data + value = merge(self.data, value) + + # Validate data + FieldValuesValidator(self.field_definition)(value) + + # Distribute to model fields + for k, v in copy_keys(value, self.core_field_names).items(): + setattr(self, k, v) + self.custom_fields = self.custom_fields | omit_keys(value, self.core_field_names) + + +class EncryptedCustomFieldsMixin(CustomFieldsMixin): + custom_fields = EncryptedField(base_field=models.JSONField(encoder=DjangoJSONEncoder, default=dict)) + + class Meta(CustomFieldsMixin.Meta): + abstract = True + diff --git a/api/src/reportcreator_api/pentests/customfields/predefined_fields.py b/api/src/reportcreator_api/pentests/customfields/predefined_fields.py new file mode 100644 index 0000000..6ff90c0 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/predefined_fields.py @@ -0,0 +1,121 @@ +from reportcreator_api.pentests.customfields.types import CvssField, EnumChoice, EnumField, FieldOrigin, ListField, MarkdownField, ObjectField, StringField, DateField, UserField, BooleanField, field_definition_to_dict +from reportcreator_api.utils.utils import copy_keys + + +# Core fields are the minimum required fields +# These fields are required internally and cannot be removed or changed +FINDING_FIELDS_CORE = { + 'title': StringField(origin=FieldOrigin.CORE, label='Title', spellcheck=True, default='TODO: Finding Title'), + 'cvss': CvssField(origin=FieldOrigin.CORE, label='CVSS', default='n/a'), +} +# Predefined fields are a set of commonly used optional fields that designs can include +FINDING_FIELDS_PREDEFINED = { + 'summary': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Summary', required=True, default='TODO: High-level summary'), + 'description': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Technical Description', required=True, default='TODO: detailed technical description of what this finding is about and how it can be exploited'), + 'precondition': StringField(origin=FieldOrigin.PREDEFINED, 
label='Precondition', required=True, spellcheck=True, default=None), + 'impact': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Impact', required=True, default='TODO: impact of finding'), + 'recommendation': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Recommendation', required=True, default='TODO: how to fix the vulnerability'), + 'short_recommendation': StringField(origin=FieldOrigin.PREDEFINED, label='Short Recommendation', required=True, spellcheck=True, default='TODO: short recommendation'), + 'references': ListField(origin=FieldOrigin.PREDEFINED, label='References', required=False, + items=StringField(origin=FieldOrigin.PREDEFINED, label='Reference', default=None)), + 'affected_components': ListField(origin=FieldOrigin.PREDEFINED, label='Affected Components', required=True, + items=StringField(origin=FieldOrigin.PREDEFINED, label='Component', default='TODO: affected component')), + 'owasp_top10_2021': EnumField(origin=FieldOrigin.PREDEFINED, label='OWASP Top 10 - 2021', required=True, default=None, choices=[ + EnumChoice(value='A01_2021', label='A01:2021 - Broken Access Control'), + EnumChoice(value='A02_2021', label='A02:2021 - Cryptographic Failures'), + EnumChoice(value='A03_2021', label='A03:2021 - Injection'), + EnumChoice(value='A04_2021', label='A04:2021 - Insecure Design'), + EnumChoice(value='A05_2021', label='A05:2021 - Security Misconfiguration'), + EnumChoice(value='A06_2021', label='A06:2021 - Vulnerable and Outdated Components'), + EnumChoice(value='A07_2021', label='A07:2021 - Identification and Authentication Failures'), + EnumChoice(value='A08_2021', label='A08:2021 - Software and Data Integrity Failures'), + EnumChoice(value='A09_2021', label='A09:2021 - Security Logging and Monitoring Failures'), + EnumChoice(value='A10_2021', label='A10:2021 - Server-Side Request Forgery (SSRF)'), + ]), + 'wstg_category': EnumField(origin=FieldOrigin.PREDEFINED, label='OWASP Web Security Testing Guide Category', required=True, default=None, choices=[ + EnumChoice(value='INFO', label='INFO - Information Gathering'), + EnumChoice(value='CONF', label='CONF - Configuration and Deployment Management'), + EnumChoice(value='IDNT', label='IDNT - Identity Management'), + EnumChoice(value='ATHN', label='ATHN - Authentication'), + EnumChoice(value='ATHZ', label='ATHZ - Authorization'), + EnumChoice(value='SESS', label='SESS - Session Management'), + EnumChoice(value='INPV', label='INPV - Input Validation'), + EnumChoice(value='ERRH', label='ERRH - Error Handling'), + EnumChoice(value='CRYP', label='CRYP - Weak Cryptography'), + EnumChoice(value='BUSL', label='BUSL - Business Logic'), + EnumChoice(value='CLNT', label='CLNT - Client-side Testing'), + EnumChoice(value='APIT', label='APIT - API Testing'), + ]), + + 'retest_notes': MarkdownField(origin=FieldOrigin.PREDEFINED, label='Re-test Notes', required=False, default=None), + 'retest_status': EnumField(origin=FieldOrigin.PREDEFINED, label='Re-test Status', required=False, default=None, choices=[ + EnumChoice(value='open', label='Open'), + EnumChoice(value='resolved', label='Resolved'), + EnumChoice(value='partial', label='Partially Resolved'), + EnumChoice(value='changed', label='Changed'), + EnumChoice(value='accepted', label='Accepted'), + EnumChoice(value='new', label='New'), + ]), +} + +REPORT_FIELDS_CORE = { + 'title': StringField(origin=FieldOrigin.CORE, label='Title', required=True, spellcheck=True, default='TODO: Report Title'), +} +REPORT_FIELDS_PREDEFINED = { + 'is_retest': BooleanField(origin=FieldOrigin.PREDEFINED, 
label='Is Retest', default=False), +} + + +def finding_fields_default(): + return field_definition_to_dict( + FINDING_FIELDS_CORE | copy_keys(FINDING_FIELDS_PREDEFINED, ['summary', 'description', 'impact', 'recommendation', 'affected_components', 'references']) | { + 'short_recommendation': StringField(label='Short Recommendation', required=True, default='TODO: short recommendation'), + }) + + +def finding_field_order_default(): + return [ + 'title', 'cvss', 'affected_components', + 'summary', 'short_recommendation', + 'description', 'impact', 'recommendation', + 'references' + ] + + + +def report_fields_default(): + return field_definition_to_dict(REPORT_FIELDS_CORE | { + 'scope': MarkdownField(label='Scope', required=True, default='TODO: The scope of this pentest included:\n* Active Directory Domain xyz\n* Internal server network 10.20.30.40/24'), + 'executive_summary': MarkdownField(label='Executive Summary', required=True, default='**TODO: write executive summary**'), + 'customer': StringField(label='Customer', required=True, default='TODO company'), + 'duration': StringField(label='Duration', required=True, default='TODO person days'), + 'start_date': DateField(label='Pentest Start Date', required=True, default=None), + 'end_date': DateField(label='Pentest End Date', required=True, default=None), + 'document_history': ListField(label='Document History', required=True, items=ObjectField(properties={ + 'version': StringField(label='Version', required=True, default='TODO: 1.0'), + 'date': DateField(label='Date', required=True, default=None), + 'description': StringField(label='Description', required=True, default='TODO: description'), + 'authors': ListField(label='Authors', required=True, items=UserField(required=True)), + })) + }) + + +def report_sections_default(): + return [ + { + 'id': 'executive_summary', + 'label': 'Executive Summary', + 'fields': ['executive_summary'], + }, + { + 'id': 'scope', + 'label': 'Scope', + 'fields': ['scope', 'duration', 'start_date', 'end_date'], + }, + { + 'id': 'customer', + 'label': 'Customer', + 'fields': ['customer'], + } + # Other fields not defined elsewhere are put in the "other" section + ] \ No newline at end of file diff --git a/api/src/reportcreator_api/pentests/customfields/sectiondefinition.schema.json b/api/src/reportcreator_api/pentests/customfields/sectiondefinition.schema.json new file mode 100644 index 0000000..ebd1265 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/sectiondefinition.schema.json @@ -0,0 +1,27 @@ +{ + "$id": "https://syslifters.com/reportcreator/sectionddefinition.schem.json", + "$schema": "https://json-schema.org/draft/2019-09/schema", + "title": "Section Definition", + "type": "array", + "items": { + "type": "object", + "required": ["id", "label", "fields"], + "properties": { + "id": { + "type": "string", + "format": "^[a-zA-Z0-9_-]+$", + "maxLength": 255 + }, + "label": { + "type": "string" + }, + "fields": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 0 + } + } + } +} \ No newline at end of file diff --git a/api/src/reportcreator_api/pentests/customfields/serializers.py b/api/src/reportcreator_api/pentests/customfields/serializers.py new file mode 100644 index 0000000..ef144f7 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/serializers.py @@ -0,0 +1,74 @@ +import datetime +from typing import OrderedDict +from uuid import UUID +from rest_framework import serializers +from reportcreator_api.pentests.customfields.types import FieldDataType, 
FieldDefinition +from reportcreator_api.users.models import PentestUser + + +class DynamicObjectSerializer(serializers.Serializer): + def __init__(self, *args, **kwargs): + self._declared_fields = kwargs.pop('fields', {}) + super().__init__(*args, **kwargs) + + +class DateField(serializers.DateField): + def to_internal_value(self, value): + date = super().to_internal_value(value) + if isinstance(date, datetime.date): + return date.isoformat() + else: + return date + + +class UserField(serializers.PrimaryKeyRelatedField): + queryset = PentestUser.objects.all() + + def to_internal_value(self, data): + if isinstance(data, (str, UUID)) and (project := self.context.get('project')) and \ + project.imported_members and (imported_user := next(filter(lambda u: data == u.get('id'), project.imported_members), None)): + return imported_user.get('id') + + user = super().to_internal_value(data) + return str(user.id) if isinstance(user, PentestUser) else user + + def to_representation(self, value): + if isinstance(value, (str, UUID)): + return value + return super().to_representation(value) + + +def serializer_from_definition(definition: dict[str, FieldDefinition], **kwargs): + return DynamicObjectSerializer( + fields=dict(filter(lambda t: t[1] is not None, map(lambda t: (t[0], serializer_from_field(t[1])), definition.items()))), + **kwargs) + + +def serializer_from_field(definition): + field_kwargs = { + 'label': definition.label, + 'required': False, + } + value_field_kwargs = field_kwargs | { + 'allow_null': True, + # 'default': getattr(definition, 'default', None), + } + field_type = definition.type + if field_type in [FieldDataType.STRING, FieldDataType.MARKDOWN, FieldDataType.CVSS, FieldDataType.COMBOBOX]: + return serializers.CharField(trim_whitespace=False, allow_blank=True, **value_field_kwargs) + elif field_type == FieldDataType.DATE: + return DateField(**value_field_kwargs) + elif field_type == FieldDataType.NUMBER: + return serializers.FloatField(**value_field_kwargs) + elif field_type == FieldDataType.BOOLEAN: + return serializers.BooleanField(**value_field_kwargs) + elif field_type == FieldDataType.ENUM: + return serializers.ChoiceField(choices=[c.value for c in definition.choices], **value_field_kwargs) + elif field_type == FieldDataType.USER: + return UserField(**value_field_kwargs) + elif field_type == FieldDataType.OBJECT: + return serializer_from_definition(definition.properties, **field_kwargs) + elif field_type == FieldDataType.LIST: + return serializers.ListField(child=serializer_from_field(definition.items), allow_empty=True, **field_kwargs) + else: + raise ValueError(f'Encountered unsupported type in field definition: "{field_type}"') diff --git a/api/src/reportcreator_api/pentests/customfields/types.py b/api/src/reportcreator_api/pentests/customfields/types.py new file mode 100644 index 0000000..3fcd58c --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/types.py @@ -0,0 +1,219 @@ +import dataclasses +import enum +from datetime import date +from importlib.metadata import requires +from inspect import isclass +from types import GenericAlias +from typing import Any, Optional, Union +from django.utils.deconstruct import deconstructible +from reportcreator_api.utils.utils import is_date_string + + +@enum.unique +class FieldDataType(enum.Enum): + STRING = 'string' + MARKDOWN = 'markdown' + CVSS = 'cvss' + DATE = 'date' + NUMBER = 'number' + BOOLEAN = 'boolean' + ENUM = 'enum' + COMBOBOX = 'combobox' + USER = 'user' + OBJECT = 'object' + LIST = 'list' + + +@enum.unique 
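A minimal sketch of how serializer_from_definition might be used to validate user-supplied field values; the definition and data below are purely illustrative, and the imports assume the module layout shown in this diff.

# Illustrative only: the field definition and input data are invented.
from reportcreator_api.pentests.customfields.types import parse_field_definition
from reportcreator_api.pentests.customfields.serializers import serializer_from_definition

definition = parse_field_definition({
    'title': {'type': 'string', 'label': 'Title'},
    'severity': {'type': 'enum', 'label': 'Severity',
                 'choices': [{'value': 'low'}, {'value': 'high'}]},
})
serializer = serializer_from_definition(definition, data={'title': 'Reflected XSS', 'severity': 'high'})
serializer.is_valid(raise_exception=True)
print(dict(serializer.validated_data))   # {'title': 'Reflected XSS', 'severity': 'high'}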
+class FieldOrigin(enum.Enum): + CORE = 'core' + PREDEFINED = 'predefined' + CUSTOM = 'custom' + + +@deconstructible +@dataclasses.dataclass +class FieldDefinition: + type: FieldDataType = None + label: str = '' + origin: FieldOrigin = FieldOrigin.CUSTOM + + +@deconstructible +@dataclasses.dataclass +class BaseStringField(FieldDefinition): + default: Optional[str] = None + required: bool = True + + +@deconstructible +@dataclasses.dataclass +class StringField(BaseStringField): + spellcheck: bool = False + type: FieldDataType = FieldDataType.STRING + + +@deconstructible +@dataclasses.dataclass +class MarkdownField(BaseStringField): + type: FieldDataType = FieldDataType.MARKDOWN + + +@deconstructible +@dataclasses.dataclass +class CvssField(BaseStringField): + type: FieldDataType = FieldDataType.CVSS + + +@deconstructible +@dataclasses.dataclass +class ComboboxField(BaseStringField): + type: FieldDataType = FieldDataType.COMBOBOX + suggestions: list[str] = dataclasses.field(default_factory=list) + + +@deconstructible +@dataclasses.dataclass +class DateField(FieldDefinition): + default: Optional[str] = None + required: bool = True + type: FieldDataType = FieldDataType.DATE + + def __post_init__(self): + if self.default and not is_date_string(self.default): + raise ValueError('Default value is not a date', self.default) + + +@deconstructible +@dataclasses.dataclass +class EnumChoice: + value: str + label: str = None + + def __post_init__(self): + self.label = self.value if not self.label else self.label + + +@deconstructible +@dataclasses.dataclass +class EnumField(FieldDefinition): + choices: list[EnumChoice] = dataclasses.field(default_factory=list) + default: Optional[str] = None + required: bool = True + type: FieldDataType = FieldDataType.ENUM + + def __post_init__(self): + if self.default and self.default not in {c.value for c in self.choices}: + raise ValueError('Default value is not a valid enum choice', self.default) + + +@deconstructible +@dataclasses.dataclass +class NumberField(FieldDefinition): + default: Optional[Union[float, int]] = None + required: bool = True + type: FieldDataType = FieldDataType.NUMBER + + +@deconstructible +@dataclasses.dataclass +class BooleanField(FieldDefinition): + default: Optional[bool] = None + type: FieldDataType = FieldDataType.BOOLEAN + + +@deconstructible +@dataclasses.dataclass +class UserField(FieldDefinition): + required: bool = True + type: FieldDataType = FieldDataType.USER + + +@deconstructible +@dataclasses.dataclass +class ObjectField(FieldDefinition): + properties: dict[str, FieldDefinition] = dataclasses.field(default_factory=dict) + type: FieldDataType = FieldDataType.OBJECT + + +@deconstructible +@dataclasses.dataclass +class ListField(FieldDefinition): + items: FieldDefinition = None + required: bool = True + type: FieldDataType = FieldDataType.LIST + + +_FIELD_DATA_TYPE_CLASSES_MAPPING = { + FieldDataType.STRING: StringField, + FieldDataType.MARKDOWN: MarkdownField, + FieldDataType.CVSS: CvssField, + FieldDataType.DATE: DateField, + FieldDataType.NUMBER: NumberField, + FieldDataType.BOOLEAN: BooleanField, + FieldDataType.ENUM: EnumField, + FieldDataType.COMBOBOX: ComboboxField, + FieldDataType.USER: UserField, + FieldDataType.OBJECT: ObjectField, + FieldDataType.LIST: ListField, +} + + +def _field_from_dict(t: type, v: Union[dict, str, Any]) -> FieldDefinition: + if isinstance(t, GenericAlias): + if t.__origin__ is list and isinstance(v, list): + return [_field_from_dict(t.__args__[0], e) for e in v] + elif t.__origin__ is dict and 
isinstance(v, dict): + return {_field_from_dict(t.__args__[0], k): _field_from_dict(t.__args__[1], e) for k, e in v.items()} + elif isinstance(v, t): + return v + elif isclass(t) and issubclass(t, enum.Enum): + return t(v) + elif isinstance(t, date) and isinstance(v, str): + return date.fromisoformat(v) + elif dataclasses.is_dataclass(t) and isinstance(v, dict): + field_types = {f.name: f.type for f in dataclasses.fields(t)} + dataclass_args = {f: _field_from_dict(field_types[f], v[f]) for f in field_types if f in v} + try: + return t(**dataclass_args) + except TypeError: + pass + + raise ValueError('Could not decode field definition', v) + + +def _parse_field_definition_entry(definition: dict) -> FieldDefinition: + if 'type' not in definition: + raise ValueError('Field type missing') + + type = FieldDataType(definition['type']) + type_class = _FIELD_DATA_TYPE_CLASSES_MAPPING[type] + val = _field_from_dict(type_class, definition) + if type == FieldDataType.OBJECT: + val.properties = parse_field_definition(definition.get('properties', {})) + elif type == FieldDataType.LIST: + val.items = _parse_field_definition_entry(definition.get('items', {})) + + return val + + +def parse_field_definition(definition: dict) -> dict[str, FieldDefinition]: + out = {} + for k, d in definition.items(): + out[k] = _parse_field_definition_entry(d) + return out + + +def field_definition_to_dict(definition: Union[dict[str, FieldDefinition], Any]): + if isinstance(definition, dict): + return {k: field_definition_to_dict(v) for k, v in definition.items()} + elif isinstance(definition, list): + return [field_definition_to_dict(e) for e in definition] + elif dataclasses.is_dataclass(definition): + return field_definition_to_dict(dataclasses.asdict(definition)) + elif isinstance(definition, enum.Enum): + return definition.value + elif isinstance(definition, date): + return date.isoformat() + else: + return definition diff --git a/api/src/reportcreator_api/pentests/customfields/utils.py b/api/src/reportcreator_api/pentests/customfields/utils.py new file mode 100644 index 0000000..50cec85 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/utils.py @@ -0,0 +1,186 @@ +import dataclasses +import enum +import random +from lorem_text import lorem +from typing import Any, Iterable, Optional, Union, OrderedDict +from django.utils import timezone + +from reportcreator_api.pentests.customfields.types import FieldDataType, FieldDefinition, FieldOrigin +from reportcreator_api.utils.utils import is_date_string, is_uuid +from reportcreator_api.utils.error_messages import format_path + + +def contains(a, b): + """ + Checks if dict a contains dict b recursively + """ + if not b: + return True + + if type(a) != type(b): + return False + + for k, v in b.items(): + if k not in a: + return False + if isinstance(v, dict): + if not contains(a[k], v): + return False + elif isinstance(v, (list, tuple)): + raise ValueError('Cannot diff lists') + elif v != b[k]: + return False + return True + + + + + +def has_field_structure_changed(old: dict[str, FieldDefinition], new: dict[str, FieldDefinition]): + if set(old.keys()) != set(new.keys()): + return True + + for k in old.keys(): + field_type = old[k].type + if field_type != new[k].type: + return True + elif field_type == FieldDataType.OBJECT and has_field_structure_changed(old[k].properties, new[k].properties): + return True + elif field_type == FieldDataType.LIST and has_field_structure_changed({'items': old[k].items}, {'items': new[k].items}): + return True + elif field_type == 
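A rough round-trip sketch for the parsing helpers above (the example definition is made up): parse_field_definition() turns the stored JSON structure into typed dataclasses, and field_definition_to_dict() serializes them back, filling in defaults such as origin='custom'.

# Round-trip sketch, assuming the helpers above are importable as shown.
from reportcreator_api.pentests.customfields.types import (
    FieldDataType, field_definition_to_dict, parse_field_definition,
)

raw = {
    'summary': {'type': 'markdown', 'label': 'Summary', 'required': True, 'default': 'TODO'},
    'severity': {'type': 'enum', 'label': 'Severity',
                 'choices': [{'value': 'low', 'label': 'Low'}, {'value': 'high', 'label': 'High'}]},
}
parsed = parse_field_definition(raw)
assert parsed['summary'].type == FieldDataType.MARKDOWN
serialized = field_definition_to_dict(parsed)
assert serialized['severity']['choices'][0]['value'] == 'low'
assert serialized['summary']['origin'] == 'custom'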
FieldDataType.ENUM and set(map(lambda c: c.value, old[k].choices)) - set(map(lambda c: c.value, new[k].choices)): + # Existing enum choice was removed + return True + + return False + + +class HandleUndefinedFieldsOptions(enum.Enum): + FILL_NONE = 'fill_none' + FILL_DEFAULT = 'fill_default' + FILL_DEMO_DATA = 'fill_demo_data' + + +def _default_or_demo_data(definition: FieldDefinition, demo_data: Any, handle_undefined: HandleUndefinedFieldsOptions): + if handle_undefined == HandleUndefinedFieldsOptions.FILL_NONE: + return None + elif handle_undefined == HandleUndefinedFieldsOptions.FILL_DEFAULT: + return definition.default + elif handle_undefined == HandleUndefinedFieldsOptions.FILL_DEMO_DATA: + return definition.default or demo_data + + +def ensure_defined_structure(value, definition: Union[dict[str, FieldDefinition], FieldDefinition], handle_undefined: HandleUndefinedFieldsOptions = HandleUndefinedFieldsOptions.FILL_DEFAULT, include_undefined=False): + """ + Ensure that the returned data is valid for the given field definition. + Recursively check for undefined fields and set a value. + Returns only data of defined fields, if value contains undefined fields, this data is not returned. + """ + if isinstance(definition, dict): + out = value if include_undefined else {} + for k, d in definition.items(): + out[k] = ensure_defined_structure(value=(value if isinstance(value, dict) else {}).get(k), definition=d, handle_undefined=handle_undefined) + return out + else: + if definition.type == FieldDataType.OBJECT: + return ensure_defined_structure(value, definition.properties, handle_undefined=handle_undefined) + elif definition.type == FieldDataType.LIST: + if isinstance(value, list): + return [ensure_defined_structure(value=e, definition=definition.items, handle_undefined=handle_undefined) for e in value] + else: + if handle_undefined == HandleUndefinedFieldsOptions.FILL_DEMO_DATA and definition.items.type != FieldDataType.USER: + return [ensure_defined_structure(value=None, definition=definition.items, handle_undefined=handle_undefined) for _ in range(2)] + else: + return [] + elif definition.type == FieldDataType.MARKDOWN and not isinstance(value, str): + return _default_or_demo_data(definition, lorem.paragraphs(3), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.STRING and not isinstance(value, str): + return _default_or_demo_data(definition, lorem.words(2), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.CVSS and not isinstance(value, str): + return _default_or_demo_data(definition, 'n/a', handle_undefined=handle_undefined) + elif definition.type == FieldDataType.ENUM and not (isinstance(value, str) and value in {c.value for c in definition.choices}): + return _default_or_demo_data(definition, next(iter(map(lambda c: c.value, definition.choices)), None), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.COMBOBOX and not isinstance(value, str): + return _default_or_demo_data(definition, next(iter(definition.suggestions), None), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.DATE and not (isinstance(value, str) and is_date_string(value)): + return _default_or_demo_data(definition, timezone.now().date().isoformat(), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.NUMBER and not isinstance(value, (int, float)): + return _default_or_demo_data(definition, random.randint(1, 10), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.BOOLEAN and 
not isinstance(value, bool): + return _default_or_demo_data(definition, random.choice([True, False]), handle_undefined=handle_undefined) + elif definition.type == FieldDataType.USER and not (isinstance(value, str) or is_uuid(value)): + return None + else: + return value + + +def check_definitions_compatible(a: Union[dict[str, FieldDefinition], FieldDefinition], b: Union[dict[str, FieldDefinition], FieldDefinition], path: Optional[tuple[str]] = None) -> tuple[bool, list[str]]: + """ + Check if definitions are compatible and values can be converted without data loss. + """ + path = path or tuple() + valid = True + errors = [] + if isinstance(a, dict) and isinstance(b, dict): + for k in set(a.keys()).intersection(b.keys()): + res_valid, res_errors = check_definitions_compatible(a[k], b[k], path=path + tuple([k])) + valid = valid and res_valid + errors.extend(res_errors) + elif isinstance(a, FieldDefinition) and isinstance(b, FieldDefinition): + if a.type != b.type: + valid = False + errors.append(f'Field "{format_path(path)}" has different types: "{a.type.value}" vs. "{b.type.value}"') + elif a.type == FieldDataType.LIST: + res_valid, res_errors = check_definitions_compatible(a.items, b.items, path=path + tuple(['[]'])) + valid = valid and res_valid + errors.extend(res_errors) + elif a.type == FieldDataType.ENUM: + missing_choices = {c.value for c in a.choices} - {c.value for c in b.choices} + if missing_choices: + valid = False + missing_choices_str = ', '.join(map(lambda c: f'"{c}"', missing_choices)) + errors.append(f'Field "{format_path(path)}" has missing enum choices: {missing_choices_str}') + return valid, errors + + +def set_field_origin(definition: Union[dict[str, FieldDefinition], FieldDefinition], predefined_fields: Union[dict, FieldDefinition, None]): + """ + Sets definition.origin recursively + """ + if isinstance(definition, dict): + out = {} + for k, d in definition.items(): + out[k] = set_field_origin(d, predefined_fields=predefined_fields.get(k) if predefined_fields else None) + return out + else: + out = dataclasses.replace(definition, origin=getattr(predefined_fields, 'origin', FieldOrigin.CUSTOM)) + + if out.type == FieldDataType.OBJECT: + out.properties = set_field_origin(out.properties, predefined_fields=getattr(predefined_fields, 'properties', None)) + elif out.type == FieldDataType.LIST: + out.items = set_field_origin(out.items, predefined_fields=getattr(predefined_fields, 'items', None)) + return out + + +def iterate_fields(value: Union[dict, Any], definition: Union[dict[str, FieldDefinition], FieldDefinition], path: Optional[tuple[str]] = None) -> Iterable[tuple[tuple[str], Any, FieldDefinition]]: + """ + Recursively iterate over all defined fields + """ + if not definition: + return + + path = path or tuple() + if isinstance(definition, dict): + for k, d in definition.items(): + yield from iterate_fields(value=(value if isinstance(value, dict) else {}).get(k), definition=d, path=path + tuple([k])) + else: + # Current field + yield path, value, definition + + # Nested structures + if definition.type == FieldDataType.OBJECT: + yield from iterate_fields(value=value or {}, definition=definition.properties, path=path) + elif definition.type == FieldDataType.LIST: + for idx, v in enumerate(value if isinstance(value, list) else []): + yield from iterate_fields(value=v, definition=definition.items, path=path + tuple(['[' + str(idx) + ']'])) + diff --git a/api/src/reportcreator_api/pentests/customfields/validators.py 
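A small illustrative sketch for ensure_defined_structure() (definition and values invented): fields not present in the definition are dropped, and defined-but-missing fields are filled with their default, None, or an empty list.

# Sketch only; imports assume the module layout shown in this diff.
from reportcreator_api.pentests.customfields.types import parse_field_definition
from reportcreator_api.pentests.customfields.utils import (
    HandleUndefinedFieldsOptions, ensure_defined_structure,
)

definition = parse_field_definition({
    'title': {'type': 'string', 'label': 'Title', 'default': 'TODO: title'},
    'references': {'type': 'list', 'label': 'References',
                   'items': {'type': 'string', 'label': 'Reference'}},
})
value = {'title': 'Weak TLS configuration', 'not_in_definition': 'dropped'}
cleaned = ensure_defined_structure(value, definition,
                                   handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT)
print(cleaned)   # {'title': 'Weak TLS configuration', 'references': []}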
b/api/src/reportcreator_api/pentests/customfields/validators.py new file mode 100644 index 0000000..89113a2 --- /dev/null +++ b/api/src/reportcreator_api/pentests/customfields/validators.py @@ -0,0 +1,136 @@ +import functools +import itertools +import json +import jsonschema +from pathlib import Path +from django.core.exceptions import ValidationError +from django.utils.deconstruct import deconstructible +from reportcreator_api.pentests.customfields.types import FieldDataType, FieldDefinition, parse_field_definition + +from reportcreator_api.pentests.customfields.utils import contains + + + +@functools.cache +def get_field_definition_schema(): + return jsonschema.Draft202012Validator(schema=json.loads((Path(__file__).parent / 'fielddefinition.schema.json').read_text())) + + +@functools.cache +def get_section_definition_schema(): + return jsonschema.Draft202012Validator(schema=json.loads((Path(__file__).parent / 'sectiondefinition.schema.json').read_text())) + + +@deconstructible +class FieldDefinitionValidator: + def __init__(self, core_fields=None, predefined_fields=None) -> None: + self.core_fields = core_fields or {} + self.predefined_fields = predefined_fields or {} + + def definition_contains(self, val: FieldDefinition, ref: FieldDefinition): + """ + Check if data types and structure of field definitions match recursively + The defintion `ref` has to be included in `val`. + `val` may extend the nested structure by adding fields, but may not remove any fields. + """ + if val.type != ref.type: + return False + if val.type == FieldDataType.OBJECT: + if set(ref.properties.keys()).difference(val.properties.keys()): + return False + return all([self.definition_contains(val.properties[k], d) for k, d in ref.properties.items()]) + elif val.type == FieldDataType.LIST: + return self.definition_contains(val.items, ref.items) + return True + + def __call__(self, value: dict): + try: + get_field_definition_schema().validate(value) + except jsonschema.ValidationError as ex: + raise ValidationError('Invalid field definition') from ex + + parsed_value = parse_field_definition(value) + # validate core fields: + # required + # structure cannot be changed + # labels and default values can be changed + for k, d in self.core_fields.items(): + if k not in parsed_value: + raise ValidationError(f'Core field "{k}" is required') + if not self.definition_contains(parsed_value[k], d): + raise ValidationError(f'Cannot change structure of core field "{k}"') + + # validate predefined fields: + # not required + # base structure cannot be changed, but can be extended + # labels and default values can be changed + for k, d in self.predefined_fields.items(): + if k in parsed_value and not self.definition_contains(parsed_value[k], d): + raise ValidationError(f'Cannot change structure of predefined field "{k}"') + + +@deconstructible +class FieldValuesValidator: + def __init__(self, field_definitions: dict[str, FieldDefinition]) -> None: + self.schema = self.compile_definition_to_schema(field_definitions) + + def compile_object(self, definition: dict): + return { + 'type': 'object', + 'additionalProperties': True, + 'properties': dict(map(lambda t: (t[0], self.compile_field(t[1])), definition.items())), + 'required': list(definition.keys()), + } + + def compile_field(self, definition: FieldDataType): + field_type = definition.type + if field_type in [FieldDataType.STRING, FieldDataType.MARKDOWN, FieldDataType.CVSS, FieldDataType.COMBOBOX]: + return {'type': ['string', 'null']} + elif field_type == FieldDataType.DATE: + 
return {'type': ['string', 'null'], 'format': 'date'} + elif field_type == FieldDataType.NUMBER: + return {'type': ['number', 'null']} + elif field_type == FieldDataType.BOOLEAN: + return {'type': ['boolean', 'null']} + elif field_type == FieldDataType.ENUM: + return {'type': ['string', 'null'], 'enum': [c.value for c in definition.choices] + [None]} + elif field_type == FieldDataType.USER: + return {'type': ['string', 'null'], 'format': 'uuid'} + elif field_type == FieldDataType.OBJECT: + return self.compile_object(definition.properties) + elif field_type == FieldDataType.LIST: + return {'type': 'array', 'items': self.compile_field(definition.items)} + else: + raise ValueError(f'Encountered invalid type in field definition: "{field_type}"') + + def compile_definition_to_schema(self, field_definitions): + return jsonschema.Draft202012Validator(schema={ + "$schema": "https://json-schema.org/draft/2019-09/schema", + **self.compile_object(field_definitions), + }) + + def __call__(self, value): + try: + self.schema.validate(value) + except jsonschema.ValidationError as ex: + raise ValidationError('Data does not match field definition') from ex + + +@deconstructible +class SectionDefinitionValidator: + def __call__(self, value): + try: + get_section_definition_schema().validate(value) + except jsonschema.ValidationError as ex: + raise ValidationError('Invalid section definition') from ex + + # validate unique section IDs + section_ids = [s['id'] for s in value] + if len(section_ids) != len(set(section_ids)): + raise ValidationError('Invalid section definition: Duplicate section IDs') + + # validate that a field is at most in one section + section_fields = list(itertools.chain(*map(lambda s: s['fields'], value))) + if len(section_fields) != len(set(section_fields)): + raise ValidationError('Invalid section definition: Field in multiple sections') + diff --git a/api/src/reportcreator_api/pentests/cvss.py b/api/src/reportcreator_api/pentests/cvss.py new file mode 100644 index 0000000..ca2d743 --- /dev/null +++ b/api/src/reportcreator_api/pentests/cvss.py @@ -0,0 +1,309 @@ +import math +import re +from typing import Any, Union + +from django.core.exceptions import ValidationError +from django.utils.translation import gettext_lazy as _ +from django.db import models + + +class CVSSLevel(models.TextChoices): + INFO = 'info', _('Info') + LOW = 'low', _('Low') + MEDIUM = 'medium', _('Medium') + HIGH = 'high', _('High') + CRITICAL = 'critical', _('Critical') + + +CVSS3_REGEX = re.compile(r'^CVSS:3.[01](/[A-Za-z]+:[A-Za-z])+$') +CVSS3_METRICS_BASE = { + 'AV': {'N': 0.85, 'A': 0.62, 'L': 0.55, 'P': 0.2}, + 'AC': {'L': 0.77, 'H': 0.44}, + 'PR': {'N': {'U': 0.85, 'C': 0.85}, 'L': {'U': 0.62, 'C': 0.68}, 'H': {'U': 0.27, 'C': 0.5}}, + 'UI': {'N': 0.85, 'R': 0.62}, + 'S': {'U': 'U', 'C': 'C'}, + 'C': {'N': 0, 'L': 0.22, 'H': 0.56}, + 'I': {'N': 0, 'L': 0.22, 'H': 0.56}, + 'A': {'N': 0, 'L': 0.22, 'H': 0.56}, +} +CVSS3_METRICS_TEMPORAL = { + 'E': {'X': 1, 'H': 1, 'F': 0.97, 'P': 0.94, 'U': 0.91}, + 'RL': {'X': 1, 'U': 1, 'W': 0.97, 'T': 0.96, 'O': 0.95}, + 'RC': {'X': 1, 'C': 1, 'R': 0.96, 'U': 0.92}, +} +CVSS3_METRICS_ENVIRONMENTAL = { + 'CR': {'X': 1, 'L': 0.5, 'M': 1, 'H': 1.5}, + 'IR': {'X': 1, 'L': 0.5, 'M': 1, 'H': 1.5}, + 'AR': {'X': 1, 'L': 0.5, 'M': 1, 'H': 1.5}, + 'MAV': {'X': None, 'N': 0.85, 'A': 0.62, 'L': 0.55, 'P': 0.2}, + 'MAC': {'X': None, 'L': 0.77, 'H': 0.44}, + 'MPR': {'X': None, 'N': {'U': 0.85, 'C': 0.85}, 'L': {'U': 0.62, 'C': 0.68}, 'H': {'U': 0.27, 'C': 0.5}}, + 'MUI': {'X': None, 'N': 
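A hedged sketch of FieldValuesValidator (definition and values are illustrative): it compiles the field definition to a JSON schema and raises a Django ValidationError for data that does not match.

# Illustrative usage sketch of the validator defined above.
from django.core.exceptions import ValidationError
from reportcreator_api.pentests.customfields.types import parse_field_definition
from reportcreator_api.pentests.customfields.validators import FieldValuesValidator

validate = FieldValuesValidator(parse_field_definition({
    'title': {'type': 'string', 'label': 'Title'},
    'cvss': {'type': 'cvss', 'label': 'CVSS'},
}))
validate({'title': 'SQL Injection', 'cvss': None})   # passes: null values are allowed
try:
    validate({'title': 42, 'cvss': None})            # fails: wrong type for "title"
except ValidationError as ex:
    print(ex)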
0.85, 'R': 0.62}, + 'MS': {'X': None, 'U': 'U', 'C': 'C'}, + 'MC': {'X': None, 'N': 0, 'L': 0.22, 'H': 0.56}, + 'MI': {'X': None, 'N': 0, 'L': 0.22, 'H': 0.56}, + 'MA': {'X': None, 'N': 0, 'L': 0.22, 'H': 0.56}, +} +CVSS3_METRICS = CVSS3_METRICS_BASE | CVSS3_METRICS_TEMPORAL | CVSS3_METRICS_ENVIRONMENTAL + +CVSS2_REGEX = re.compile(r'(/?[A-Za-z]+:[A-Z]+)+') +CVSS2_METRICS = { + 'AV': {'L': 0.395, 'A': 0.646, 'N': 1.0}, + 'AC': {'H': 0.35, 'M': 0.61, 'L': 0.71}, + 'Au': {'M': 0.45, 'S': 0.56, 'N': 0.71}, + 'C': {'N': 0, 'P': 0.275, 'C': 0.660}, + 'I': {'N': 0, 'P': 0.275, 'C': 0.660}, + 'A': {'N': 0, 'P': 0.275, 'C': 0.660}, + + 'E': {'ND': 1, 'H': 1, 'F': 0.95, 'P': 0.90, 'U': 0.85}, + 'RL': {'ND': 1, 'U': 1, 'W': 0.95, 'TF': 0.90, 'OF': 0.87}, + 'RC': {'ND': 1, 'C': 1, 'UR': 0.95, 'UC': 0.90}, + + 'CDP': {'ND': 0, 'N': 0, 'L': 0.1, 'LM': 0.3, 'MH': 0.4, 'H': 0.5}, + 'TD': {'ND': 1, 'N': 0, 'L': 0.25, 'M': 0.75, 'H': 1}, + 'CR': {'ND': 1, 'L': 0.5, 'M': 1, 'H': 1.51}, + 'IR': {'ND': 1, 'L': 0.5, 'M': 1, 'H': 1.51}, + 'AR': {'ND': 1, 'L': 0.5, 'M': 1, 'H': 1.51}, +} +CVSS2_REQUIRED_METRICS = ['AV', 'AC', 'Au', 'C', 'I', 'A'] + + +def parse_cvss3(vector, version = '3.0'): + """ + Parses CVSS3.0 and CVSS3.1 vectors. + For CVSS 3.0 and 3.1 the metrics are the same. Only descriptions and definitions changed. + """ + if not vector or not CVSS3_REGEX.match(vector) or not vector.startswith('CVSS:' + version): + raise ValidationError(f'Invalid CVSS:{version} vector: Invalid format') + + # parse CVSS metrics + values = dict(map(lambda p: tuple(p.split(':')), filter(None, vector[8:].split('/')))) + for k, v in values.items(): + if k not in CVSS3_METRICS or v not in CVSS3_METRICS[k]: + raise ValidationError(f'Invalid CVSS:{version} vector: invalid metric value "{k}:{v}"') + + # Validate required metrics + for m in CVSS3_METRICS_BASE.keys(): + if m not in values: + raise ValidationError(f'Invalid CVSS{version} vector: base metric "{m}" missing') + + return values + + +def is_cvss3_0(vector): + try: + parse_cvss3(vector, version='3.0') + return True + except ValidationError: + return False + + +def is_cvss3_1(vector): + try: + parse_cvss3(vector, version='3.1') + return True + except ValidationError: + return False + + +def round_up(input): + int_input = round(input * 100000) + if int_input % 10000 == 0: + return int_input / 100000.0 + else: + return (math.floor(int_input / 10000) + 1) / 10.0 + + +def calculate_score_cvss3_0(vector) -> Union[float, None]: + try: + values = parse_cvss3(vector, version='3.0') + except ValidationError: + return None + + def metric(name, modified=False) -> Any: + # First try modified metric, then original metric, then X (Not Definied) + if modified: + m = CVSS3_METRICS.get('M' + name, {}).get(values.get('M' + name)) + if m is not None and m != 'X': + return m + m = CVSS3_METRICS.get(name, {}).get(values.get(name)) + if m is not None: + return m + return CVSS3_METRICS.get(name, {}).get('X') + + # Environmental Score calculation (this is the final score shown to the user) + scope_changed = metric('S', modified=True) == 'C' + isc = min(1 - ( + (1 - metric('C', modified=True) * metric('CR')) * + (1 - metric('I', modified=True) * metric('IR')) * + (1 - metric('A', modified=True) * metric('AR')) + ), 0.915) + impact = 7.52 * (isc - 0.029) - 3.25 * pow(isc - 0.02, 15) if scope_changed else \ + 6.42 * isc + exploitability = 8.22 * metric('AV', modified=True) * metric('AC', modified=True) * metric('PR', modified=True)[metric('S', modified=True)] * metric('UI', modified=True) + score 
= 0.0 if impact <= 0 else ( + round_up(min(1.08 * (impact + exploitability), 10)) if scope_changed else + round_up(min(impact + exploitability, 10)) + ) + score = round_up(score * metric('E') * metric('RL') * metric('RC')) + return score + + +def calculate_score_cvss3_1(vector) -> Union[float, None]: + try: + values = parse_cvss3(vector, version='3.1') + except ValidationError: + return None + + def has_metric_group(group): + return any(map(lambda m: m in values and values[m] != 'X', group.keys())) + + def metric(name, modified=False) -> Any: + # First try modified metric, then original metric, then X (Not Definied) + if modified: + m = CVSS3_METRICS.get('M' + name, {}).get(values.get('M' + name)) + if m is not None and m != 'X': + return m + m = CVSS3_METRICS.get(name, {}).get(values.get(name)) + if m is not None: + return m + return CVSS3_METRICS.get(name, {}).get('X') + + # Environmental score + if has_metric_group(CVSS3_METRICS_ENVIRONMENTAL): + m_scope_changed = metric('S', modified=True) == 'C' + miss = min(1 - ( + (1 - metric('C', modified=True) * metric('CR')) * + (1 - metric('I', modified=True) * metric('IR')) * + (1 - metric('A', modified=True) * metric('AR')) + ), 0.915) + m_impact = 7.52 * (miss - 0.029) - 3.25 * pow(miss * 0.9731 - 0.02, 13) if m_scope_changed else \ + 6.42 * miss + m_exploitability = 8.22 * metric('AV', modified=True) * metric('AC', modified=True) * metric('PR', modified=True)[metric('S', modified=True)] * metric('UI', modified=True) + env_score = 0.0 if m_impact <= 0 else ( + round_up(round_up(min(1.08 * (m_impact + m_exploitability), 10)) * metric('E') * metric('RL') * metric('RC')) if m_scope_changed else + round_up(round_up(min(m_impact + m_exploitability, 10)) * metric('E') * metric('RL') * metric('RC')) + ) + return env_score + + # Base score + scope_changed = metric('S') == 'C' + iss = 1 - ((1 - metric('C')) * (1 - metric('I')) *(1 - metric('A'))) + impact = (7.52 * (iss - 0.029) - 3.25 * pow(iss - 0.02, 15)) if scope_changed else \ + 6.42 * iss + exploitability = 8.22 * metric('AV') * metric('AC') * metric('PR')[metric('S')] * metric('UI') + score = 0.0 if impact <= 0 else ( + round_up(min(1.08 * (impact + exploitability), 10)) if scope_changed else + round_up(min(impact + exploitability, 10)) + ) + + # Temporal score + if has_metric_group(CVSS3_METRICS_TEMPORAL): + score = round_up(score * metric('E') * metric('RL') * metric('RC')) + return score + + +def parse_cvss2(vector): + # Strip non-standardized prefix + vector = (vector or '').replace('CVSS2#', '') + + if not vector or not CVSS2_REGEX.match(vector): + raise ValidationError('Invalid CVSS2 vector: Invalid format') + + # parse CVSS metrics + values = dict(map(lambda p: tuple(p.split(':')), filter(None, vector.split('/')))) + for k, v in values.items(): + if k not in CVSS2_METRICS or v not in CVSS2_METRICS[k]: + raise ValidationError(f'Invalid CVSS2 vector: invalid metric value "{k}:{v}"') + + # Validate required metrics + for m in CVSS2_REQUIRED_METRICS: + if m not in values: + raise ValidationError(f'Invalid CVSS2 vector: base metric "{m}" missing') + + return values + + +def is_cvss2(vector): + try: + parse_cvss2(vector) + return True + except ValidationError: + return False + + +def calculate_score_cvss2(vector): + try: + values = parse_cvss2(vector) + except ValidationError: + return None + + def metric(name): + m = CVSS2_METRICS.get(name, {}).get(values.get(name)) + if m is not None: + return m + return CVSS2_METRICS.get(name, {}).get('ND') + + def round_up(inp): + return round(inp, 
ndigits=1) + + # Environmental Score calculation (this is the final score shown to the user) + adjusted_impact = min(10.41 * (1 - ( + (1 - metric('C') * metric('CR')) * + (1 - metric('I') * metric('IR')) * + (1 - metric('A') * metric('AR'))) + ), 10) + exploitability = 20 * metric('AV') * metric('AC') * metric('Au') + adjusted_base_score = round_up( + ((0.6 * adjusted_impact) + (0.4 * exploitability) - 1.5) * + (0 if adjusted_impact == 0 else 1.176)) + adjusted_temporal = round_up(adjusted_base_score * metric('E') * metric('RL') * metric('RC')) + environmental_score = round_up((adjusted_temporal + (10 - adjusted_temporal) * metric('CDP')) * metric('TD')) + return environmental_score + + +def is_cvss(vector): + return is_cvss3_1(vector) or is_cvss3_0(vector) or is_cvss2(vector) + + +def calculate_score(vector) -> float: + """ + Calculate the CVSS score from a CVSS vector. + Supports CVSS v2, v3.0 and v3.1 + """ + if (score := calculate_score_cvss3_1(vector)) is not None: + return score + elif (score := calculate_score_cvss3_0(vector)) is not None: + return score + elif (score := calculate_score_cvss2(vector)) is not None: + return score + return 0.0 + + +def level_from_score(score: float) -> CVSSLevel: + """ + Calculate the CVSS level from a CVSS score. + """ + if score >= 9.0: + return CVSSLevel.CRITICAL + elif score >= 7.0: + return CVSSLevel.HIGH + elif score >= 4.0: + return CVSSLevel.MEDIUM + elif score > 0: + return CVSSLevel.LOW + else: + return CVSSLevel.INFO + + +def level_number_from_score(score: float) -> int: + if score >= 9.0: + return 5 + elif score >= 7.0: + return 4 + elif score >= 4.0: + return 3 + elif score > 0: + return 2 + else: + return 1 diff --git a/api/src/reportcreator_api/pentests/migrations/0001_initial.py b/api/src/reportcreator_api/pentests/migrations/0001_initial.py new file mode 100644 index 0000000..a7b18e8 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0001_initial.py @@ -0,0 +1,122 @@ +# Generated by Django 4.0.4 on 2022-07-08 11:08 + +import django.core.serializers.json +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.pentests.customfields.mixins +import reportcreator_api.pentests.customfields.validators +import reportcreator_api.pentests.customfields.predefined_fields +import reportcreator_api.pentests.models +import reportcreator_api.pentests.storages +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='PentestFinding', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('title', models.TextField(default='')), + ('cvss', models.CharField(default='n/a', max_length=50)), + ('risk_score', models.FloatField(default=0.0)), + ('risk_level', models.CharField(choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], default='none', max_length=10)), + ('custom_fields', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.pentests.customfields.mixins.CustomFieldsMixin, models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + migrations.CreateModel( + name='PentestProject', + fields=[ + ('id', 
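A quick sketch of the scoring helpers above (import path assumed; the vector is a standard example, not taken from the diff).

# calculate_score() tries CVSS v3.1, then v3.0, then v2, and falls back to 0.0.
from reportcreator_api.pentests.cvss import calculate_score, is_cvss, level_from_score

vector = 'CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H'
assert is_cvss(vector)
score = calculate_score(vector)          # 9.8 for this base vector
print(score, level_from_score(score))    # 9.8, critical
# Anything that cannot be parsed as CVSS v2/v3.x falls back to 0.0 / info:
print(calculate_score('n/a'), level_from_score(0.0))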
models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=255)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + migrations.CreateModel( + name='PentestReport', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('title', models.TextField(default='')), + ('custom_fields', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.pentests.customfields.mixins.CustomFieldsMixin, models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + migrations.CreateModel( + name='ProjectType', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=255)), + ('report_template', models.TextField(default='')), + ('report_styles', models.TextField(default='')), + ('report_preview_data', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)), + ('report_fields', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.report_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator({'title': {'static': True, 'type': 'string'}})])), + ('report_sections', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.report_sections_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.SectionDefinitionValidator()])), + ('finding_fields', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.finding_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator({'cvss': {'static': True, 'type': 'cvss'}, 'title': {'static': True, 'type': 'string'}})])), + ('finding_field_order', models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.finding_field_order_default, encoder=django.core.serializers.json.DjangoJSONEncoder)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + migrations.CreateModel( + name='UploadedImage', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('image', models.ImageField(storage=reportcreator_api.pentests.storages.get_uploaded_image_storage, upload_to='')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.pentestproject')), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + migrations.CreateModel( + 
name='UploadedAsset', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=255)), + ('file', models.FileField(storage=reportcreator_api.pentests.storages.get_uploaded_asset_storage, upload_to='')), + ('project_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.projecttype')), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0002_initial.py b/api/src/reportcreator_api/pentests/migrations/0002_initial.py new file mode 100644 index 0000000..3b7eee7 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0002_initial.py @@ -0,0 +1,38 @@ +# Generated by Django 4.0.4 on 2022-07-08 11:08 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='pentestproject', + name='pentesters', + field=models.ManyToManyField(limit_choices_to=models.Q(('is_active', True)), to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='pentestproject', + name='project_type', + field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='pentests.projecttype'), + ), + migrations.AddField( + model_name='pentestproject', + name='report', + field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='project', to='pentests.pentestreport'), + ), + migrations.AddField( + model_name='pentestfinding', + name='report', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.pentestreport'), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0003_findingtemplate_and_more.py b/api/src/reportcreator_api/pentests/migrations/0003_findingtemplate_and_more.py new file mode 100644 index 0000000..1a4bca6 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0003_findingtemplate_and_more.py @@ -0,0 +1,99 @@ +# Generated by Django 4.0.4 on 2022-07-20 17:08 + +import django.core.serializers.json +from django.db import migrations, models +import reportcreator_api.pentests.customfields.mixins +import reportcreator_api.pentests.customfields.predefined_fields +import reportcreator_api.pentests.customfields.types +import reportcreator_api.pentests.customfields.utils +import reportcreator_api.pentests.customfields.validators +import reportcreator_api.utils.models +import uuid + + +def migrate_field_definition_static_to_origin(apps, schema_editor): + ProjectType = apps.get_model('pentests', 'projecttype') + for pt in ProjectType.objects.all(): + # Load and serialize definition to update format to current structure + pt.finding_fields = reportcreator_api.pentests.customfields.types.field_definition_to_dict( + reportcreator_api.pentests.customfields.utils.set_field_origin( + reportcreator_api.pentests.customfields.types.parse_field_definition(pt.finding_fields), + reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_CORE | reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_PREDEFINED)) + 
pt.report_fields = reportcreator_api.pentests.customfields.types.field_definition_to_dict( + reportcreator_api.pentests.customfields.utils.set_field_origin( + reportcreator_api.pentests.customfields.types.parse_field_definition(pt.report_fields), + reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_CORE | reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_PREDEFINED)) + pt.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0002_initial'), + ] + + operations = [ + migrations.CreateModel( + name='FindingTemplate', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('usage_count', models.PositiveIntegerField(db_index=True, default=0)), + ('tags', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=255), db_index=True, default=list, size=None)), + ('title', models.TextField(db_index=True, default='')), + ('cvss', models.CharField(default='n/a', max_length=50)), + ('custom_fields', models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder)), + ], + options={ + 'ordering': ['-usage_count', '-created'], + }, + bases=(reportcreator_api.pentests.customfields.mixins.CustomFieldsMixin, models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + migrations.AlterField( + model_name='projecttype', + name='finding_fields', + field=models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.finding_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator(core_fields=reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_CORE, predefined_fields=reportcreator_api.pentests.customfields.predefined_fields.FINDING_FIELDS_PREDEFINED)]), + ), + migrations.AlterField( + model_name='projecttype', + name='report_fields', + field=models.JSONField(default=reportcreator_api.pentests.customfields.predefined_fields.report_fields_default, encoder=django.core.serializers.json.DjangoJSONEncoder, validators=[reportcreator_api.pentests.customfields.validators.FieldDefinitionValidator(core_fields=reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_CORE, predefined_fields=reportcreator_api.pentests.customfields.predefined_fields.REPORT_FIELDS_PREDEFINED)]), + ), + migrations.RunPython(code=migrate_field_definition_static_to_origin), + migrations.AlterField( + model_name='pentestfinding', + name='risk_level', + field=models.CharField(choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='none', max_length=10), + ), + migrations.AlterField( + model_name='pentestfinding', + name='risk_score', + field=models.FloatField(db_index=True, default=0.0), + ), + migrations.AlterField( + model_name='pentestfinding', + name='title', + field=models.TextField(db_index=True, default=''), + ), + migrations.AlterField( + model_name='pentestproject', + name='name', + field=models.CharField(db_index=True, max_length=255), + ), + migrations.AlterField( + model_name='pentestreport', + name='title', + field=models.TextField(db_index=True, default=''), + ), + migrations.AlterField( + model_name='projecttype', + name='name', + field=models.CharField(db_index=True, max_length=255), + ), + 
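For context, a hedged sketch of what the FieldDefinitionValidator configured on ProjectType.finding_fields above enforces (the rejected definition is invented): core finding fields such as 'title' and 'cvss' must stay present and keep their structure, while predefined fields may only be extended.

# Sketch only; assumes the validator and predefined fields are importable as shown above.
from django.core.exceptions import ValidationError
from reportcreator_api.pentests.customfields.predefined_fields import (
    FINDING_FIELDS_CORE, FINDING_FIELDS_PREDEFINED,
)
from reportcreator_api.pentests.customfields.validators import FieldDefinitionValidator

validate = FieldDefinitionValidator(core_fields=FINDING_FIELDS_CORE,
                                    predefined_fields=FINDING_FIELDS_PREDEFINED)
try:
    # Definition without the core 'title'/'cvss' fields
    validate({'description': {'type': 'markdown', 'label': 'Description', 'required': True, 'default': None}})
except ValidationError as ex:
    print(ex)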
migrations.AlterField( + model_name='uploadedasset', + name='name', + field=models.CharField(db_index=True, max_length=255), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0004_alter_uploadedasset_unique_together.py b/api/src/reportcreator_api/pentests/migrations/0004_alter_uploadedasset_unique_together.py new file mode 100644 index 0000000..fe69ef1 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0004_alter_uploadedasset_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.0.4 on 2022-07-21 14:32 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0003_findingtemplate_and_more'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='uploadedasset', + unique_together={('project_type', 'name')}, + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0005_reportsection_alter_findingtemplate_options_and_more.py b/api/src/reportcreator_api/pentests/migrations/0005_reportsection_alter_findingtemplate_options_and_more.py new file mode 100644 index 0000000..2056ebe --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0005_reportsection_alter_findingtemplate_options_and_more.py @@ -0,0 +1,66 @@ +# Generated by Django 4.0.4 on 2022-08-13 12:27 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import django.utils.timezone +import reportcreator_api.pentests.models +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('contenttypes', '0002_remove_content_type_name'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0004_alter_uploadedasset_unique_together'), + ] + + operations = [ + migrations.CreateModel( + name='ReportSection', + fields=[ + ('id', models.CharField(max_length=255)), + ('virtual_pk', models.UUIDField(primary_key=True, serialize=False)), + ], + options={ + 'managed': False, + }, + bases=(reportcreator_api.pentests.models.LockableMixin, models.Model), + ), + migrations.AlterModelOptions( + name='findingtemplate', + options={'ordering': ['-usage_count', '-risk_score', '-created']}, + ), + migrations.AlterModelOptions( + name='pentestfinding', + options={'ordering': ['-risk_score', '-created']}, + ), + migrations.AddField( + model_name='findingtemplate', + name='risk_level', + field=models.CharField(choices=[('none', 'None'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='none', max_length=10), + ), + migrations.AddField( + model_name='findingtemplate', + name='risk_score', + field=models.FloatField(db_index=True, default=0.0), + ), + migrations.CreateModel( + name='LockInfo', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('object_id', models.UUIDField()), + ('last_ping', models.DateTimeField(default=django.utils.timezone.now)), + ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.contenttype')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'unique_together': {('content_type', 'object_id')}, + }, + bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin), + ), + ] diff --git 
a/api/src/reportcreator_api/pentests/migrations/0006_rename_project_type_uploadedasset_linked_object_and_more.py b/api/src/reportcreator_api/pentests/migrations/0006_rename_project_type_uploadedasset_linked_object_and_more.py new file mode 100644 index 0000000..9958357 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0006_rename_project_type_uploadedasset_linked_object_and_more.py @@ -0,0 +1,66 @@ +# Generated by Django 4.0.4 on 2022-08-22 09:18 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +def generate_uploadedimage_name(apps, schema_editor): + UploadedImage = apps.get_model('pentests', 'UploadedImage') + imgs = list(UploadedImage.objects.all()) + for img in imgs: + img.name = img.file.name + UploadedImage.objects.bulk_update(imgs, fields=['name']) + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0005_reportsection_alter_findingtemplate_options_and_more'), + ] + + operations = [ + migrations.RenameField( + model_name='uploadedasset', + old_name='project_type', + new_name='linked_object', + ), + migrations.AddField( + model_name='uploadedasset', + name='uploaded_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + ), + migrations.AlterUniqueTogether( + name='uploadedasset', + unique_together={('linked_object', 'name')}, + ), + + migrations.RenameField( + model_name='uploadedimage', + old_name='image', + new_name='file', + ), + migrations.RenameField( + model_name='uploadedimage', + old_name='project', + new_name='linked_object', + ), + migrations.AddField( + model_name='uploadedimage', + name='uploaded_by', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='uploadedimage', + name='name', + field=models.CharField(db_index=True, default='', max_length=255), + ), + migrations.RunPython( + code=generate_uploadedimage_name, reverse_code=migrations.RunPython.noop, + ), + migrations.AlterUniqueTogether( + name='uploadedimage', + unique_together={('linked_object', 'name')}, + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0007_alter_findingtemplate_options_and_more.py b/api/src/reportcreator_api/pentests/migrations/0007_alter_findingtemplate_options_and_more.py new file mode 100644 index 0000000..61e00f1 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0007_alter_findingtemplate_options_and_more.py @@ -0,0 +1,22 @@ +# Generated by Django 4.0.7 on 2022-08-24 13:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0006_rename_project_type_uploadedasset_linked_object_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='findingtemplate', + options={}, + ), + migrations.AlterField( + model_name='uploadedimage', + name='name', + field=models.CharField(db_index=True, max_length=255), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0008_alter_projecttype_options_and_more.py b/api/src/reportcreator_api/pentests/migrations/0008_alter_projecttype_options_and_more.py new file mode 100644 index 0000000..c4b0998 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0008_alter_projecttype_options_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 4.0.7 on 2022-08-26 13:20 + +from django.db import 
migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0007_alter_findingtemplate_options_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='projecttype', + options={}, + ), + migrations.AlterField( + model_name='uploadedasset', + name='linked_object', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='assets', to='pentests.projecttype'), + ), + migrations.AlterField( + model_name='uploadedimage', + name='linked_object', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to='pentests.pentestproject'), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0009_alter_pentestproject_report.py b/api/src/reportcreator_api/pentests/migrations/0009_alter_pentestproject_report.py new file mode 100644 index 0000000..a83aba2 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0009_alter_pentestproject_report.py @@ -0,0 +1,19 @@ +# Generated by Django 4.0.7 on 2022-08-29 08:22 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0008_alter_projecttype_options_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='pentestproject', + name='report', + field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='project', to='pentests.pentestreport'), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0010_alter_pentestproject_options_and_more.py b/api/src/reportcreator_api/pentests/migrations/0010_alter_pentestproject_options_and_more.py new file mode 100644 index 0000000..bbaa2f6 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0010_alter_pentestproject_options_and_more.py @@ -0,0 +1,32 @@ +# Generated by Django 4.0.7 on 2022-08-29 14:39 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0009_alter_pentestproject_report'), + ] + + operations = [ + migrations.AlterModelOptions( + name='pentestproject', + options={}, + ), + migrations.AddField( + model_name='findingtemplate', + name='language', + field=models.CharField(choices=[('en-US', 'English'), ('de-DE', 'German')], db_index=True, default='de-DE', max_length=5), + ), + migrations.AddField( + model_name='pentestproject', + name='language', + field=models.CharField(choices=[('en-US', 'English'), ('de-DE', 'German')], db_index=True, default='de-DE', max_length=5), + ), + migrations.AddField( + model_name='projecttype', + name='language', + field=models.CharField(choices=[('en-US', 'English'), ('de-DE', 'German')], db_index=True, default='de-DE', max_length=5), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0011_reportsection.py b/api/src/reportcreator_api/pentests/migrations/0011_reportsection.py new file mode 100644 index 0000000..6556706 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0011_reportsection.py @@ -0,0 +1,45 @@ +# Generated by Django 4.0.7 on 2022-08-31 07:44 + +from django.db import migrations, models +import django.db.models.deletion +from reportcreator_api.pentests.models import ReportSection +import reportcreator_api.utils.models +import uuid + + +def migrate_create_sections(apps, schema_editor): + ReportSection = apps.get_model('pentests', 'ReportSection') + PentestProject = apps.get_model('pentests', 'PentestProject') + + sections = [] + 
for p in PentestProject.objects.select_related('report', 'project_type').all(): + sections.extend([ReportSection(report=p.report, section_id=s.get('id')) for s in p.project_type.report_sections]) + ReportSection.objects.bulk_create(sections) + + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0010_alter_pentestproject_options_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='ReportSection', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('section_id', models.CharField(db_index=True, max_length=255)), + ('report', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pentests.pentestreport')), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + 'unique_together': {('report', 'section_id')}, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.RunPython(code=migrate_create_sections, reverse_code=migrations.RunPython.noop), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0012_pentestfinding_assignee_reportsection_assignee.py b/api/src/reportcreator_api/pentests/migrations/0012_pentestfinding_assignee_reportsection_assignee.py new file mode 100644 index 0000000..d3197a0 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0012_pentestfinding_assignee_reportsection_assignee.py @@ -0,0 +1,26 @@ +# Generated by Django 4.0.7 on 2022-08-31 08:46 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0011_reportsection'), + ] + + operations = [ + migrations.AddField( + model_name='pentestfinding', + name='assignee', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='reportsection', + name='assignee', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0013_pentestfinding_template_and_more.py b/api/src/reportcreator_api/pentests/migrations/0013_pentestfinding_template_and_more.py new file mode 100644 index 0000000..a5bc880 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0013_pentestfinding_template_and_more.py @@ -0,0 +1,52 @@ +# Generated by Django 4.0.7 on 2022-09-20 09:55 + +from django.db import migrations, models +import django.db.models.deletion + + +def rename_risklevel_none_to_info(apps, schema_editor): + PentestFinding = apps.get_model('pentests', 'PentestFinding') + FindingTemplate = apps.get_model('pentests', 'FindingTemplate') + + PentestFinding.objects \ + .filter(risk_level='none') \ + .update(risk_level='info') + FindingTemplate.objects \ + .filter(risk_level='none') \ + .update(risk_level='info') + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0012_pentestfinding_assignee_reportsection_assignee'), + ] + + operations = [ + migrations.AddField( + model_name='pentestfinding', + name='template', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='pentests.findingtemplate'), + ), + migrations.AlterField( + 
model_name='findingtemplate', + name='risk_level', + field=models.CharField(choices=[('info', 'Info'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='info', max_length=10), + ), + migrations.AlterField( + model_name='pentestfinding', + name='report', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='findings', to='pentests.pentestreport'), + ), + migrations.AlterField( + model_name='pentestfinding', + name='risk_level', + field=models.CharField(choices=[('info', 'Info'), ('low', 'Low'), ('medium', 'Medium'), ('high', 'High'), ('critical', 'Critical')], db_index=True, default='info', max_length=10), + ), + migrations.AlterField( + model_name='reportsection', + name='report', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='pentests.pentestreport'), + ), + migrations.RunPython(code=rename_risklevel_none_to_info), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0014_auto_20220922_1337.py b/api/src/reportcreator_api/pentests/migrations/0014_auto_20220922_1337.py new file mode 100644 index 0000000..e752a3c --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0014_auto_20220922_1337.py @@ -0,0 +1,25 @@ +# Generated by Django 4.0.7 on 2022-09-22 13:37 + +from django.db import migrations + +from reportcreator_api.pentests.customfields.types import field_definition_to_dict, parse_field_definition + + +def migrate_add_required_to_field_definitions(apps, schema_editor): + ProjectType = apps.get_model('pentests', 'ProjectType') + project_types = list(ProjectType.objects.all()) + for project_type in project_types: + project_type.finding_fields = field_definition_to_dict(parse_field_definition(project_type.finding_fields)) + project_type.report_fields = field_definition_to_dict(parse_field_definition(project_type.report_fields)) + ProjectType.objects.bulk_update(project_types, fields=['finding_fields', 'report_fields']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0013_pentestfinding_template_and_more'), + ] + + operations = [ + migrations.RunPython(code=migrate_add_required_to_field_definitions, reverse_code=migrations.RunPython.noop), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0015_alter_findingtemplate_cvss_alter_pentestfinding_cvss.py b/api/src/reportcreator_api/pentests/migrations/0015_alter_findingtemplate_cvss_alter_pentestfinding_cvss.py new file mode 100644 index 0000000..073c26d --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0015_alter_findingtemplate_cvss_alter_pentestfinding_cvss.py @@ -0,0 +1,23 @@ +# Generated by Django 4.0.7 on 2022-10-18 10:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0014_auto_20220922_1337'), + ] + + operations = [ + migrations.AlterField( + model_name='findingtemplate', + name='cvss', + field=models.CharField(default='n/a', max_length=255), + ), + migrations.AlterField( + model_name='pentestfinding', + name='cvss', + field=models.CharField(default='n/a', max_length=255), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0016_alter_pentestreport_options_and_more.py b/api/src/reportcreator_api/pentests/migrations/0016_alter_pentestreport_options_and_more.py new file mode 100644 index 0000000..fcb9648 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0016_alter_pentestreport_options_and_more.py @@ -0,0 +1,117 @@ 
+# Generated by Django 4.0.7 on 2022-10-13 08:10 + +import uuid +from django.conf import settings +import django.contrib.postgres.fields +import django.core.serializers.json +from django.db import migrations, models +import reportcreator_api.utils.models + + +def migrate_set_finding_ids(apps, schema_editor): + PentestFinding = apps.get_model('pentests', 'PentestFinding') + PentestFinding.objects.update(finding_id=models.F('id')) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0015_alter_findingtemplate_cvss_alter_pentestfinding_cvss'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.AlterModelOptions( + name='pentestreport', + options={}, + ), + migrations.AddField( + model_name='pentestproject', + name='imported_pentesters', + field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder), default=list, size=None), + ), + migrations.AlterField( + model_name='findingtemplate', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='lockinfo', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='pentestfinding', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='pentestproject', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='pentestreport', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='projecttype', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='reportsection', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='uploadedasset', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AlterField( + model_name='uploadedimage', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + migrations.AddField( + model_name='findingtemplate', + name='imported', + field=models.BooleanField(db_index=True, default=False), + ), + migrations.AddField( + model_name='pentestproject', + name='imported', + field=models.BooleanField(db_index=True, default=False), + ), + migrations.AddField( + model_name='projecttype', + name='imported', + field=models.BooleanField(db_index=True, default=False), + ), + migrations.AlterField( + model_name='pentestproject', + name='pentesters', + field=models.ManyToManyField(related_name='projects', to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='projecttype', + name='hidden', + field=models.BooleanField(db_index=True, default=False), + ), + migrations.AddField( + model_name='pentestfinding', + name='finding_id', + field=models.UUIDField(db_index=True, default=uuid.uuid4, editable=False), + ), + migrations.AlterField( + model_name='reportsection', + name='section_id', + field=models.CharField(db_index=True, editable=False, max_length=255), + ), + migrations.RunPython(code=migrate_set_finding_ids), +
migrations.AlterUniqueTogether( + name='pentestfinding', + unique_together={('report', 'finding_id')}, + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0017_pentestproject_readonly.py b/api/src/reportcreator_api/pentests/migrations/0017_pentestproject_readonly.py new file mode 100644 index 0000000..7b677df --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0017_pentestproject_readonly.py @@ -0,0 +1,18 @@ +# Generated by Django 4.0.7 on 2022-10-18 14:29 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0016_alter_pentestreport_options_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='pentestproject', + name='readonly', + field=models.BooleanField(db_index=True, default=False), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0018_remove_findingtemplate_imported_and_more.py b/api/src/reportcreator_api/pentests/migrations/0018_remove_findingtemplate_imported_and_more.py new file mode 100644 index 0000000..3a9fc24 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0018_remove_findingtemplate_imported_and_more.py @@ -0,0 +1,69 @@ +# Generated by Django 4.0.7 on 2022-10-23 08:47 + +from django.db import migrations, models +import django.db.models.deletion + +from reportcreator_api.pentests.models import SourceEnum + + +def migrate_to_source_enum(apps, schema_editor): + ProjectType = apps.get_model('pentests', 'ProjectType') + FindingTemplate = apps.get_model('pentests', 'FindingTemplate') + PentestProject = apps.get_model('pentests', 'PentestProject') + + FindingTemplate.objects \ + .filter(imported=True) \ + .update(source=SourceEnum.IMPORTED) + PentestProject.objects \ + .filter(imported=True) \ + .update(source=SourceEnum.IMPORTED) + ProjectType.objects \ + .filter(imported=True) \ + .filter(hidden=False) \ + .update(source=SourceEnum.IMPORTED) + ProjectType.objects \ + .filter(imported=True) \ + .filter(hidden=True) \ + .update(source=SourceEnum.IMPORTED_DEPENDENCY) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0017_pentestproject_readonly'), + ] + + operations = [ + migrations.AddField( + model_name='findingtemplate', + name='source', + field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized')], db_index=True, default='created', editable=False, max_length=50), + ), + migrations.AddField( + model_name='pentestproject', + name='source', + field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized')], db_index=True, default='created', editable=False, max_length=50), + ), + migrations.AddField( + model_name='projecttype', + name='source', + field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized')], db_index=True, default='created', editable=False, max_length=50), + ), + migrations.RunPython(code=migrate_to_source_enum), + migrations.RemoveField( + model_name='findingtemplate', + name='imported', + ), + migrations.RemoveField( + model_name='pentestproject', + name='imported', + ), + migrations.RemoveField( + model_name='projecttype', + name='hidden', + ), + migrations.RemoveField( + model_name='projecttype', + name='imported', + ), + ] diff --git 
a/api/src/reportcreator_api/pentests/migrations/0019_remove_report_1.py b/api/src/reportcreator_api/pentests/migrations/0019_remove_report_1.py new file mode 100644 index 0000000..f2a70a0 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0019_remove_report_1.py @@ -0,0 +1,69 @@ +# Generated by Django 4.0.7 on 2022-10-23 10:53 + +import django.core.serializers.json +from django.db import migrations, models +import django.db.models.deletion + + +def migrate_report_to_project(apps, schema_editor): + PentestReport = apps.get_model('pentests', 'PentestReport') + PentestProject = apps.get_model('pentests', 'PentestProject') + ReportSection = apps.get_model('pentests', 'ReportSection') + PentestFinding = apps.get_model('pentests', 'PentestFinding') + + # Delete unreferenced reports (remainders of deleted projects) + PentestReport.objects \ + .filter(project__isnull=True) \ + .delete() + + projects = list(PentestProject.objects.select_related('report').all()) + for p in projects: + p.custom_fields = {'title': p.report.title} | p.report.custom_fields + PentestProject.objects.bulk_update(projects, ['custom_fields']) + + sections = list(ReportSection.objects.select_related('report__project').all()) + for s in sections: + s.project = s.report.project + ReportSection.objects.bulk_update(sections, ['project']) + + findings = list(PentestFinding.objects.select_related('report__project').all()) + for f in findings: + f.project = f.report.project + PentestFinding.objects.bulk_update(findings, ['project']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0018_remove_findingtemplate_imported_and_more'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='pentestfinding', + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name='reportsection', + unique_together=set(), + ), + migrations.AddField( + model_name='pentestfinding', + name='project', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='findings', to='pentests.pentestproject'), + preserve_default=False, + ), + migrations.AddField( + model_name='pentestproject', + name='custom_fields', + field=models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), + ), + migrations.AddField( + model_name='reportsection', + name='project', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='pentests.pentestproject'), + preserve_default=False, + ), + + migrations.RunPython(code=migrate_report_to_project), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0020_remove_report_2.py b/api/src/reportcreator_api/pentests/migrations/0020_remove_report_2.py new file mode 100644 index 0000000..da0834a --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0020_remove_report_2.py @@ -0,0 +1,47 @@ +# Generated by Django 4.0.7 on 2022-10-23 11:58 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0019_remove_report_1'), + ] + + operations = [ + migrations.RemoveField( + model_name='pentestproject', + name='report', + ), + migrations.AlterField( + model_name='pentestfinding', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='findings', to='pentests.pentestproject'), + ), + migrations.AlterField( + model_name='reportsection', + name='project', + 
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sections', to='pentests.pentestproject'), + ), + migrations.AlterUniqueTogether( + name='pentestfinding', + unique_together={('project', 'finding_id')}, + ), + migrations.AlterUniqueTogether( + name='reportsection', + unique_together={('project', 'section_id')}, + ), + migrations.RemoveField( + model_name='pentestfinding', + name='report', + ), + migrations.RemoveField( + model_name='reportsection', + name='report', + ), + migrations.DeleteModel( + name='PentestReport', + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0021_projecttype_linked_project.py b/api/src/reportcreator_api/pentests/migrations/0021_projecttype_linked_project.py new file mode 100644 index 0000000..445bd23 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0021_projecttype_linked_project.py @@ -0,0 +1,38 @@ +# Generated by Django 4.0.7 on 2022-10-23 12:30 + +from django.db import migrations, models +import django.db.models.deletion + +from reportcreator_api.pentests.models import SourceEnum + + +def migrate_set_linked_project(apps, schema_editor): + ProjectType = apps.get_model('pentests', 'ProjectType') + linked_pts = list(ProjectType.objects \ + .filter(source=SourceEnum.IMPORTED_DEPENDENCY) \ + .annotate(project_count=models.Count('pentestproject')) \ + .filter(project_count=1) \ + .prefetch_related('pentestproject_set')) + for pt in linked_pts: + p = list(pt.pentestproject_set.all())[0] + if p.source != SourceEnum.IMPORTED: + continue + pt.linked_project = p + ProjectType.objects.bulk_update(linked_pts, ['linked_project']) + + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0020_remove_report_2'), + ] + + operations = [ + migrations.AddField( + model_name='projecttype', + name='linked_project', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='pentests.pentestproject'), + ), + migrations.RunPython(code=migrate_set_linked_project, reverse_code=migrations.RunPython.noop), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0022_auto_20221024_1329.py b/api/src/reportcreator_api/pentests/migrations/0022_auto_20221024_1329.py new file mode 100644 index 0000000..e73b39b --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0022_auto_20221024_1329.py @@ -0,0 +1,27 @@ +# Generated by Django 4.0.7 on 2022-10-24 13:29 + +from django.db import migrations +from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_PREDEFINED +from reportcreator_api.pentests.customfields.types import field_definition_to_dict + + +def migrate_predefined_wstg(apps, schema_editor): + """ + Update predefined_field "wstg_category" in all ProjectTypes that use it + """ + ProjectType = apps.get_model('pentests', 'ProjectType') + + for pt in ProjectType.objects.filter(finding_fields__wstg_category__isnull=False): + pt.finding_fields['wstg_category'] = field_definition_to_dict(FINDING_FIELDS_PREDEFINED['wstg_category']) + pt.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0021_projecttype_linked_project'), + ] + + operations = [ + migrations.RunPython(code=migrate_predefined_wstg, reverse_code=migrations.RunPython.noop), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0023_pentestfinding_status_reportsection_status_and_more.py b/api/src/reportcreator_api/pentests/migrations/0023_pentestfinding_status_reportsection_status_and_more.py new file mode 100644 
index 0000000..2375dda --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0023_pentestfinding_status_reportsection_status_and_more.py @@ -0,0 +1,53 @@ +# Generated by Django 4.1.3 on 2022-11-22 07:51 + +import django.contrib.postgres.fields +import django.core.serializers.json +from django.db import migrations, models + +from reportcreator_api.pentests.models import ReviewStatus + + +def migrate_set_status(apps, schema_editor): + PentestFinding = apps.get_model('pentests.PentestFinding') + ReportSection = apps.get_model('pentests.ReportSection') + FindingTemplate = apps.get_model('pentests.FindingTemplate') + + PentestFinding.objects \ + .filter(project__readonly=True) \ + .update(status=ReviewStatus.FINISHED) + ReportSection.objects \ + .filter(project__readonly=True) \ + .update(status=ReviewStatus.FINISHED) + FindingTemplate.objects \ + .update(status=ReviewStatus.FINISHED) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0022_auto_20221024_1329'), + ] + + operations = [ + migrations.AddField( + model_name='pentestfinding', + name='status', + field=models.CharField(choices=[('in-progress', 'In progress'), ('ready-for-review', 'Ready for review'), ('needs-improvement', 'Needs improvement'), ('finished', 'Finished')], db_index=True, default='in-progress', max_length=20), + ), + migrations.AddField( + model_name='reportsection', + name='status', + field=models.CharField(choices=[('in-progress', 'In progress'), ('ready-for-review', 'Ready for review'), ('needs-improvement', 'Needs improvement'), ('finished', 'Finished')], db_index=True, default='in-progress', max_length=20), + ), + migrations.AddField( + model_name='findingtemplate', + name='status', + field=models.CharField(choices=[('in-progress', 'In progress'), ('ready-for-review', 'Ready for review'), ('needs-improvement', 'Needs improvement'), ('finished', 'Finished')], db_index=True, default='in-progress', max_length=20), + ), + migrations.RunPython(code=migrate_set_status, reverse_code=migrations.RunPython.noop), + migrations.AlterField( + model_name='pentestproject', + name='imported_pentesters', + field=django.contrib.postgres.fields.ArrayField(base_field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder), blank=True, default=list, size=None), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0024_project_member_role.py b/api/src/reportcreator_api/pentests/migrations/0024_project_member_role.py new file mode 100644 index 0000000..30ce488 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0024_project_member_role.py @@ -0,0 +1,95 @@ +# Generated by Django 4.1.3 on 2022-11-24 08:45 + +from django.conf import settings +import django.contrib.postgres.fields +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.utils.models +import uuid + +def migrate_set_roles(apps, schema_editor): + PentestProject = apps.get_model('pentests.PentestProject') + ProjectMemberInfo = apps.get_model('pentests.ProjectMemberInfo') + ProjectMemberRole = apps.get_model('pentests.ProjectMemberRole') + OldThroughModel = PentestProject.pentesters.through + + ProjectMemberRole.objects.bulk_create([ + ProjectMemberRole(role='lead', default=False), + ProjectMemberRole(role='pentester', default=True), + ProjectMemberRole(role='reviewer', default=False), + ]) + default_roles = list(ProjectMemberRole.objects.filter(default=True).values_list('role', flat=True)) + + member_infos = [] + for mi in OldThroughModel.objects.all(): +
member_infos.append(ProjectMemberInfo(project=mi.pentestproject, user=mi.pentestuser, roles=default_roles)) + ProjectMemberInfo.objects.bulk_create(member_infos) + + projects = list(PentestProject.objects.filter(imported_members__len__gt=0).all()) + for p in projects: + for m in p.imported_members: + m['roles'] = default_roles + p.save() + PentestProject.objects.bulk_update(projects, ['imported_members']) + + +def reverse_migrate_set_roles(apps, schema_editor): + PentestProject = apps.get_model('pentests.PentestProject') + ProjectMemberInfo = apps.get_model('pentests.ProjectMemberInfo') + OldThroughModel = PentestProject.pentesters.through + + old_member_infos = [] + for mi in ProjectMemberInfo.objects.all(): + old_member_infos.append(OldThroughModel(pentestproject=mi.project, pentestuser=mi.user)) + OldThroughModel.objects.bulk_create(old_member_infos) + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0023_pentestfinding_status_reportsection_status_and_more'), + ] + + operations = [ + migrations.RenameField( + model_name='pentestproject', + old_name='imported_pentesters', + new_name='imported_members', + ), + migrations.CreateModel( + name='ProjectMemberInfo', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('roles', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=50), blank=True, default=list, size=None)), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='pentests.pentestproject')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'unique_together': {('project', 'user')}, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.CreateModel( + name='ProjectMemberRole', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('role', models.CharField(max_length=50, unique=True)), + ('default', models.BooleanField(default=False)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.RunPython(code=migrate_set_roles, reverse_code=reverse_migrate_set_roles), + migrations.RemoveField( + model_name='pentestproject', + name='pentesters', + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_1.py b/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_1.py new file mode 100644 index 0000000..24568c3 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_1.py @@ -0,0 +1,75 @@ +import django.core.serializers.json +from django.db import migrations, models +import reportcreator_api.archive.crypto.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0024_project_member_role'), + ] + + operations = [ + migrations.AddField( + model_name='pentestfinding', + name='custom_fields_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.JSONField(default=dict, 
encoder=django.core.serializers.json.DjangoJSONEncoder), editable=True), + ), + migrations.AddField( + model_name='pentestproject', + name='custom_fields_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), editable=True), + ), + migrations.AddField( + model_name='pentestfinding', + name='template_id_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.UUIDField(blank=True, null=True), blank=True, editable=True, null=True), + ), + migrations.AddField( + model_name='projecttype', + name='report_preview_data_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.JSONField(default=dict, encoder=django.core.serializers.json.DjangoJSONEncoder), editable=True), + ), + migrations.AddField( + model_name='projecttype', + name='report_styles_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.TextField(default=''), editable=True), + ), + migrations.AddField( + model_name='projecttype', + name='report_template_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.TextField(default=''), editable=True), + ), + migrations.AddField( + model_name='uploadedasset', + name='name_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=255, default=''), editable=True), + preserve_default=False, + ), + migrations.AddField( + model_name='uploadedasset', + name='name_hash', + field=models.BinaryField(db_index=True, default=b'', max_length=32), + preserve_default=False, + ), + migrations.AddField( + model_name='uploadedimage', + name='name_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=255, default=''), editable=True), + preserve_default=False, + ), + migrations.AddField( + model_name='uploadedimage', + name='name_hash', + field=models.BinaryField(db_index=True, default=b'', max_length=32), + preserve_default=False, + ), + migrations.AlterUniqueTogether( + name='uploadedasset', + unique_together=set(), + ), + migrations.AlterUniqueTogether( + name='uploadedimage', + unique_together=set(), + ), + ] \ No newline at end of file diff --git a/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_2.py b/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_2.py new file mode 100644 index 0000000..c7ebfa7 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_2.py @@ -0,0 +1,99 @@ +# Generated by Django 4.1.2 on 2022-10-27 11:17 + +import hashlib +from django.db import migrations, models +from reportcreator_api.pentests import cvss +from reportcreator_api.utils.utils import omit_keys + + +def migrate_to_encrypted(apps, schema_editor): + PentestProject = apps.get_model('pentests', 'PentestProject') + PentestFinding = apps.get_model('pentests', 'PentestFinding') + ProjectType = apps.get_model('pentests', 'ProjectType') + UploadedAsset = apps.get_model('pentests', 'UploadedAsset') + UploadedImage = apps.get_model('pentests', 'UploadedImage') + + projects = list(PentestProject.objects.all()) + for p in projects: + p.custom_fields_new = p.custom_fields + PentestProject.objects.bulk_update(projects, ['custom_fields_new']) + + findings = list(PentestFinding.objects.all()) + for f in findings: + f.template_id_new = f.template_id + f.custom_fields_new = f.custom_fields | { + 'title': f.title, + 'cvss': f.cvss, + } + 
PentestFinding.objects.bulk_update(findings, ['custom_fields_new', 'template_id_new']) + + project_types = list(ProjectType.objects.all()) + for pt in project_types: + pt.report_template_new = pt.report_template + pt.report_styles_new = pt.report_styles + pt.report_preview_data_new = pt.report_preview_data + ProjectType.objects.bulk_update(project_types, ['report_template_new', 'report_styles_new', 'report_preview_data_new']) + + images = list(UploadedImage.objects.all()) + for i in images: + i.name_new = i.name + i.name_hash = hashlib.sha3_256(i.name.encode()).digest() + UploadedImage.objects.bulk_update(images, ['name_new', 'name_hash']) + + assets = list(UploadedAsset.objects.all()) + for a in assets: + a.name_new = a.name + a.name_hash = hashlib.sha3_256(a.name.encode()).digest() + UploadedAsset.objects.bulk_update(assets, ['name_new', 'name_hash']) + + +def reverse_migrate_from_encrypted(apps, schema_editor): + PentestProject = apps.get_model('pentests', 'PentestProject') + PentestFinding = apps.get_model('pentests', 'PentestFinding') + FindingTemplate = apps.get_model('pentests', 'FindingTemplate') + ProjectType = apps.get_model('pentests', 'ProjectType') + UploadedAsset = apps.get_model('pentests', 'UploadedAsset') + UploadedImage = apps.get_model('pentests', 'UploadedImage') + + projects = list(PentestProject.objects.all()) + for p in projects: + p.custom_fields = p.custom_fields_new + PentestProject.objects.bulk_update(projects, ['custom_fields']) + + findings = list(PentestFinding.objects.all()) + for f in findings: + f.title = f.custom_fields_new['title'] + f.cvss = f.custom_fields_new['cvss'] + f.risk_score = cvss.calculate_score(f.cvss) + f.risk_level = cvss.level_from_score(f.risk_score) + f.template = FindingTemplate.objects.filter(id=f.template_id_new).first() + f.custom_fields = omit_keys(f.custom_fields_new, ['title', 'cvss']) + PentestFinding.objects.bulk_update(findings, ['custom_fields', 'title', 'cvss', 'risk_score', 'risk_level', 'template']) + + project_types = list(ProjectType.objects.all()) + for pt in project_types: + pt.report_template = pt.report_template_new + pt.report_styles = pt.report_styles_new + pt.report_preview_data = pt.report_preview_data_new + ProjectType.objects.bulk_update(project_types, ['report_template', 'report_styles', 'report_preview_data']) + + images = list(UploadedImage.objects.all()) + for i in images: + i.name = i.name_new + UploadedImage.objects.bulk_update(images, ['name']) + + assets = list(UploadedAsset.objects.all()) + for a in assets: + a.name = a.name_new + UploadedAsset.objects.bulk_update(assets, ['name']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0025_db_encryption_1'), + ] + + operations = [ + migrations.RunPython(code=migrate_to_encrypted, reverse_code=reverse_migrate_from_encrypted), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_3.py b/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_3.py new file mode 100644 index 0000000..cd642cf --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0025_db_encryption_3.py @@ -0,0 +1,124 @@ +# Generated by Django 4.1.2 on 2022-10-27 12:25 + +from django.db import migrations, models +import reportcreator_api.archive.crypto.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0025_db_encryption_2'), + ] + + operations = [ + migrations.AlterModelOptions( + name='pentestfinding', + options={'ordering': ['-created']}, + ), + migrations.RemoveField( +
model_name='pentestfinding', + name='template', + ), + migrations.RemoveField( + model_name='pentestfinding', + name='title', + ), + migrations.RemoveField( + model_name='pentestfinding', + name='cvss', + ), + migrations.RemoveField( + model_name='pentestfinding', + name='risk_level', + ), + migrations.RemoveField( + model_name='pentestfinding', + name='risk_score', + ), + migrations.RemoveField( + model_name='pentestfinding', + name='custom_fields', + ), + migrations.RemoveField( + model_name='projecttype', + name='report_preview_data', + ), + migrations.RemoveField( + model_name='projecttype', + name='report_styles', + ), + migrations.RemoveField( + model_name='projecttype', + name='report_template', + ), + migrations.RemoveField( + model_name='pentestproject', + name='custom_fields', + ), + migrations.RemoveField( + model_name='uploadedasset', + name='name', + ), + migrations.RemoveField( + model_name='uploadedimage', + name='name', + ), + migrations.RenameField( + model_name='pentestfinding', + old_name='template_id_new', + new_name='template_id', + ), + migrations.RenameField( + model_name='pentestfinding', + old_name='custom_fields_new', + new_name='custom_fields', + ), + migrations.RenameField( + model_name='pentestproject', + old_name='custom_fields_new', + new_name='custom_fields', + ), + migrations.RenameField( + model_name='projecttype', + old_name='report_preview_data_new', + new_name='report_preview_data', + ), + migrations.RenameField( + model_name='projecttype', + old_name='report_styles_new', + new_name='report_styles', + ), + migrations.RenameField( + model_name='projecttype', + old_name='report_template_new', + new_name='report_template', + ), + migrations.RenameField( + model_name='uploadedimage', + old_name='name_new', + new_name='name', + ), + migrations.RenameField( + model_name='uploadedasset', + old_name='name_new', + new_name='name', + ), + migrations.AlterField( + model_name='uploadedasset', + name='name', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=255), editable=True), + ), + migrations.AlterField( + model_name='uploadedimage', + name='name', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=255), editable=True), + ), + migrations.AlterUniqueTogether( + name='uploadedasset', + unique_together={('linked_object', 'name_hash')}, + ), + migrations.AlterUniqueTogether( + name='uploadedimage', + unique_together={('linked_object', 'name_hash')}, + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0026_stringfield_spellcheck.py b/api/src/reportcreator_api/pentests/migrations/0026_stringfield_spellcheck.py new file mode 100644 index 0000000..7a3d4cf --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0026_stringfield_spellcheck.py @@ -0,0 +1,36 @@ +# Generated by Django 4.1.2 on 2022-10-27 12:25 + +from django.db import migrations +from reportcreator_api.pentests.customfields.types import parse_field_definition, field_definition_to_dict + + +def migrate_enable_spellcheck(apps, schema_editor): + ProjectType = apps.get_model('pentests', 'ProjectType') + + update_predefined_finding_fields = ['title', 'precondition', 'short_recommendation'] + update_predefined_report_fields = ['title'] + for pt in ProjectType.objects.all(): + finding_fields = parse_field_definition(pt.finding_fields) + for f in update_predefined_finding_fields: + if f in finding_fields: + finding_fields[f].spellcheck = True + pt.finding_fields = 
field_definition_to_dict(finding_fields) + + report_fields = parse_field_definition(pt.report_fields) + for f in update_predefined_report_fields: + if f in report_fields: + report_fields[f].spellcheck = True + pt.report_fields = field_definition_to_dict(report_fields) + + pt.save() + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0025_db_encryption_3'), + ] + + operations = [ + migrations.RunPython(code=migrate_enable_spellcheck, reverse_code=migrations.RunPython.noop), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0027_notebookpage_uploadedusernotebookimage_and_more.py b/api/src/reportcreator_api/pentests/migrations/0027_notebookpage_uploadedusernotebookimage_and_more.py new file mode 100644 index 0000000..5f81db6 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0027_notebookpage_uploadedusernotebookimage_and_more.py @@ -0,0 +1,69 @@ +# Generated by Django 4.1.3 on 2023-02-07 14:26 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.archive.crypto.fields +import reportcreator_api.pentests.storages +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0026_stringfield_spellcheck'), + ] + + operations = [ + migrations.CreateModel( + name='NotebookPage', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('note_id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False)), + ('title', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.TextField(default=''), editable=True)), + ('text', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.TextField(default=''), editable=True)), + ('checked', models.BooleanField(blank=True, null=True)), + ('emoji', models.CharField(blank=True, max_length=32, null=True)), + ('order', models.PositiveIntegerField()), + ('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='pentests.notebookpage')), + ('project', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notes', to='pentests.pentestproject')), + ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notes', to=settings.AUTH_USER_MODEL)), + ], + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.CreateModel( + name='UploadedUserNotebookImage', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=255), editable=True)), + ('name_hash', models.BinaryField(db_index=True, max_length=32)), + ('file', models.ImageField(storage=reportcreator_api.pentests.storages.get_uploaded_image_storage, upload_to='')), + ('linked_object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='images', to=settings.AUTH_USER_MODEL)), + ('uploaded_by', models.ForeignKey(null=True, 
on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + 'unique_together': {('linked_object', 'name_hash')}, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.AddConstraint( + model_name='notebookpage', + constraint=models.CheckConstraint(check=models.Q(models.Q(('project__isnull', False), ('user__isnull', True)), models.Q(('project__isnull', True), ('user__isnull', False)), _connector='OR'), name='has_project_or_user'), + ), + migrations.AddConstraint( + model_name='notebookpage', + constraint=models.UniqueConstraint(condition=models.Q(('project__isnull', False)), fields=('project', 'note_id'), name='unique_note_id_per_project'), + ), + migrations.AddConstraint( + model_name='notebookpage', + constraint=models.UniqueConstraint(condition=models.Q(('user__isnull', False)), fields=('user', 'note_id'), name='unique_note_id_per_user'), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0028_uploadedprojectfile.py b/api/src/reportcreator_api/pentests/migrations/0028_uploadedprojectfile.py new file mode 100644 index 0000000..20a0b2b --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0028_uploadedprojectfile.py @@ -0,0 +1,39 @@ +# Generated by Django 4.2a1 on 2023-02-13 11:36 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.archive.crypto.fields +import reportcreator_api.pentests.storages +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0027_notebookpage_uploadedusernotebookimage_and_more'), + ] + + operations = [ + migrations.CreateModel( + name='UploadedProjectFile', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=255), editable=True)), + ('name_hash', models.BinaryField(db_index=True, max_length=32)), + ('file', models.FileField(storage=reportcreator_api.pentests.storages.get_uploaded_file_storage, upload_to='')), + ('linked_object', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='pentests.pentestproject')), + ('uploaded_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + 'unique_together': {('linked_object', 'name_hash')}, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0029_rename_emoji_notebookpage_status_emoji_and_more.py b/api/src/reportcreator_api/pentests/migrations/0029_rename_emoji_notebookpage_status_emoji_and_more.py new file mode 100644 index 0000000..82b5a54 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0029_rename_emoji_notebookpage_status_emoji_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2a1 on 2023-02-22 15:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0028_uploadedprojectfile'), + ] + + operations = [ + 
migrations.RenameField( + model_name='notebookpage', + old_name='emoji', + new_name='status_emoji', + ), + migrations.AddField( + model_name='notebookpage', + name='icon_emoji', + field=models.CharField(blank=True, max_length=32, null=True), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0030_alter_findingtemplate_source_and_more.py b/api/src/reportcreator_api/pentests/migrations/0030_alter_findingtemplate_source_and_more.py new file mode 100644 index 0000000..84904d2 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0030_alter_findingtemplate_source_and_more.py @@ -0,0 +1,59 @@ +# Generated by Django 4.2b1 on 2023-03-02 13:24 + +from django.db import migrations, models +import reportcreator_api.pentests.querysets + + +def snapshot_project_types(apps, schema_editor): + PentestProject = apps.get_model('pentests', 'PentestProject') + UploadedAsset = apps.get_model('pentests', 'UploadedAsset') + + for p in PentestProject.objects.filter(project_type__linked_project=None).select_related('project_type'): + # Copy ProjectType + pt = p.project_type + assets = list(pt.assets.all()) + pt.pk = None + pt.lock_info_data = None + pt.linked_project = p + pt.source = 'snapshot' + pt.save() + for a in assets: + a.pk = None + a.linked_object = pt + UploadedAsset.objects.bulk_create(assets) + + # Update project + p.project_type = pt + p.save(update_fields=['project_type']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('pentests', '0029_rename_emoji_notebookpage_status_emoji_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='findingtemplate', + name='source', + field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized'), ('snapshot', 'Snapshot')], db_index=True, default='created', editable=False, max_length=50), + ), + migrations.AlterField( + model_name='pentestproject', + name='source', + field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized'), ('snapshot', 'Snapshot')], db_index=True, default='created', editable=False, max_length=50), + ), + migrations.AlterField( + model_name='projecttype', + name='source', + field=models.CharField(choices=[('created', 'Created'), ('imported', 'Imported'), ('imported_dependency', 'Imported Dependency'), ('customized', 'Customized'), ('snapshot', 'Snapshot')], db_index=True, default='created', editable=False, max_length=50), + ), + migrations.AlterModelManagers( + name='projecttype', + managers=[ + ('objects', reportcreator_api.pentests.querysets.ProjectTypeManager()), + ], + ), + migrations.RunPython(code=snapshot_project_types, reverse_code=migrations.RunPython.noop), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0031_projecttype_linked_user.py b/api/src/reportcreator_api/pentests/migrations/0031_projecttype_linked_user.py new file mode 100644 index 0000000..4f43f9b --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0031_projecttype_linked_user.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2b1 on 2023-03-06 08:49 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0030_alter_findingtemplate_source_and_more'), + ] + + operations = [ + migrations.AddField( + 
model_name='projecttype', + name='linked_user', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), + migrations.AddConstraint( + model_name='projecttype', + constraint=models.CheckConstraint(check=models.Q(('linked_project', None), ('linked_user', None), _connector='OR'), name='linked_project_or_user'), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/0032_archivedproject_archivedprojectkeypart_userpublickey_and_more.py b/api/src/reportcreator_api/pentests/migrations/0032_archivedproject_archivedprojectkeypart_userpublickey_and_more.py new file mode 100644 index 0000000..26fe170 --- /dev/null +++ b/api/src/reportcreator_api/pentests/migrations/0032_archivedproject_archivedprojectkeypart_userpublickey_and_more.py @@ -0,0 +1,89 @@ +# Generated by Django 4.2b1 on 2023-03-22 09:54 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import pathlib +import reportcreator_api.archive.crypto.fields +import reportcreator_api.utils.models +import reportcreator_api.utils.storages +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('pentests', '0031_projecttype_linked_user'), + ] + + operations = [ + migrations.CreateModel( + name='ArchivedProject', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', models.CharField(db_index=True, max_length=255)), + ('threshold', models.PositiveIntegerField()), + ('file', models.FileField(storage=reportcreator_api.utils.storages.UnencryptedFileSystemStorage(access_key='', bucket_name='', endpoint_url='', location=pathlib.PurePosixPath('/data/archivedfiles'), secret_key=''), upload_to='')), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.CreateModel( + name='ArchivedProjectKeyPart', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('key_part', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.JSONField(null=True, blank=True), editable=True, null=True, blank=True)), + ('encrypted_key_part', models.BinaryField()), + ('decrypted_at', models.DateTimeField(blank=True, db_index=True, null=True)), + ('archived_project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='key_parts', to='pentests.archivedproject')), + ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.CreateModel( + name='UserPublicKey', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=255)), + ('enabled', 
models.BooleanField(db_index=True, default=True)), + ('public_key', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.TextField(), editable=True)), + ('public_key_info', models.JSONField()), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='public_keys', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + migrations.CreateModel( + name='ArchivedProjectPublicKeyEncryptedKeyPart', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('encrypted_data', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(), editable=True)), + ('key_part', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='public_key_encrypted_parts', to='pentests.archivedprojectkeypart')), + ('public_key', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='pentests.userpublickey')), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + ] diff --git a/api/src/reportcreator_api/pentests/migrations/__init__.py b/api/src/reportcreator_api/pentests/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/pentests/models/__init__.py b/api/src/reportcreator_api/pentests/models/__init__.py new file mode 100644 index 0000000..07bf270 --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/__init__.py @@ -0,0 +1,15 @@ +from .common import LockInfo, LockStatus, SourceEnum, Language, ReviewStatus, LockableMixin +from .template import FindingTemplate +from .project import ProjectType, ProjectTypeScope, PentestProject, ProjectMemberInfo, ProjectMemberRole, ReportSection, PentestFinding +from .notes import NotebookPage +from .archive import ArchivedProject, UserPublicKey, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart +from .files import UploadedFileBase, UploadedAsset, UploadedImage, UploadedProjectFile, UploadedUserNotebookImage + +__all__ = [ + 'LockInfo', 'LockStatus', 'SourceEnum', 'Language', 'ReviewStatus', 'LockableMixin', + 'FindingTemplate', + 'ProjectType', 'ProjectTypeScope', 'PentestProject', 'ProjectMemberInfo', 'ProjectMemberRole', 'ReportSection', 'PentestFinding', + 'NotebookPage', + 'ArchivedProject', 'UserPublicKey', 'ArchivedProjectKeyPart', 'ArchivedProjectPublicKeyEncryptedKeyPart', + 'UploadedFileBase', 'UploadedAsset', 'UploadedImage', 'UploadedProjectFile', 'UploadedUserNotebookImage', +] diff --git a/api/src/reportcreator_api/pentests/models/archive.py b/api/src/reportcreator_api/pentests/models/archive.py new file mode 100644 index 0000000..930444b --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/archive.py @@ -0,0 +1,55 @@ +from django.db import models +from reportcreator_api.archive.crypto import pgp + +from reportcreator_api.archive.crypto.fields import EncryptedField +from reportcreator_api.pentests import querysets, storages +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.models import BaseModel + + +class UserPublicKey(BaseModel): + user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, related_name='public_keys') + name 
= models.CharField(max_length=255) + enabled = models.BooleanField(default=True, db_index=True) + public_key = EncryptedField(base_field=models.TextField()) + public_key_info = models.JSONField() + + objects = querysets.UserPublicKeyManager() + + def encrypt(self, data): + return pgp.encrypt(data=data, public_key=self.public_key) + + +class ArchivedProject(BaseModel): + """ + An archived project that is encrypted using shamir secret sharing. + Each user gets 1 key part. + The user key part is encrypted with all public keys of this user. + """ + name = models.CharField(max_length=255, null=False, blank=False, db_index=True) + threshold = models.PositiveIntegerField() + file = models.FileField(storage=storages.get_archive_file_storage()) + + objects = querysets.ArchivedProjectManager() + + +class ArchivedProjectKeyPart(BaseModel): + archived_project = models.ForeignKey(to=ArchivedProject, on_delete=models.CASCADE, related_name='key_parts') + user = models.ForeignKey(to=PentestUser, on_delete=models.PROTECT, null=True, blank=False) + + encrypted_key_part = models.BinaryField() + key_part = EncryptedField(base_field=models.JSONField(null=True, blank=True), null=True, blank=True) + decrypted_at = models.DateTimeField(null=True, blank=True, db_index=True) + + @property + def is_decrypted(self): + return bool(self.decrypted_at) + + +class ArchivedProjectPublicKeyEncryptedKeyPart(BaseModel): + key_part = models.ForeignKey(to=ArchivedProjectKeyPart, on_delete=models.CASCADE, related_name='public_key_encrypted_parts') + public_key = models.ForeignKey(to=UserPublicKey, on_delete=models.PROTECT) + + # Shamir key part encrypted by public key + encrypted_data = EncryptedField(base_field=models.CharField()) + diff --git a/api/src/reportcreator_api/pentests/models/common.py b/api/src/reportcreator_api/pentests/models/common.py new file mode 100644 index 0000000..4efbe02 --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/common.py @@ -0,0 +1,132 @@ +import enum +from django.conf import settings +from django.db import models, transaction, IntegrityError +from django.contrib.contenttypes.models import ContentType +from django.utils import timezone +from django.utils.translation import gettext_lazy as _ + +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.relations import GenericOneToOneForeignKey, GenericOneToOneRelation + + +class LockStatus(enum.Enum): + CREATED = 'created' + REFRESHED = 'refreshed' + FAILED = 'failed' + + +class LockInfo(BaseModel): + # Generic foreign key to arbitrary/multiple models + content_type = models.ForeignKey(to=ContentType, on_delete=models.CASCADE) + object_id = models.UUIDField() + locked_object = GenericOneToOneForeignKey(ct_field='content_type', fk_field='object_id') + + last_ping = models.DateTimeField(default=timezone.now) + user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, null=False) + + class Meta: + unique_together = [('content_type', 'object_id')] + + @property + def expires(self): + return (self.last_ping or timezone.now()) + settings.MAX_LOCK_TIME + + def refresh_lock(self): + try: + self.last_ping = timezone.now() + self.save(force_update=True) + return LockStatus.REFRESHED + except self.DoesNotExist: + return LockStatus.FAILED + + +class LockableMixin(models.Model): + lock_info_data = GenericOneToOneRelation(to=LockInfo) + + class Meta: + abstract = True + + @property + def lock_info(self): + if not self.is_locked: + return None + return 
self.lock_info_data + + @property + def is_locked(self): + return self.lock_info_data is not None and timezone.now() <= self.lock_info_data.expires + + def _cleanup_old_lock(self): + if self.lock_info_data: + self.lock_info_data.delete() + self.lock_info_data = None + + def lock(self, user, refresh_lock=True): + if self.is_locked and self.lock_info.user != user: + # Already locked by another user + return LockStatus.FAILED + elif self.is_locked and self.lock_info.user == user: + # Refresh lock such that it does not expire + if refresh_lock: + return self.lock_info.refresh_lock() + else: + return LockStatus.REFRESHED + elif not self.is_locked: + with transaction.atomic(): + self._cleanup_old_lock() + try: + self.lock_info_data = LockInfo.objects.create(locked_object=self, user=user) + return LockStatus.CREATED + except IntegrityError: + self.lock_info_data = LockInfo.objects.get(content_type=ContentType.objects.get_for_model(self), object_id=self.id) + if self.lock_info_data.user == user: + return LockStatus.REFRESHED + else: + return LockStatus.FAILED + return LockStatus.FAILED + + def unlock(self, user): + if not self.is_locked: + self._cleanup_old_lock() + return True + elif self.is_locked and self.lock_info.user == user: + self._cleanup_old_lock() + return True + else: + return False + + +class SourceEnum(models.TextChoices): + CREATED = 'created', 'Created' + IMPORTED = 'imported', 'Imported' + IMPORTED_DEPENDENCY = 'imported_dependency', 'Imported Dependency' + CUSTOMIZED = 'customized', 'Customized' + SNAPSHOT = 'snapshot', 'Snapshot' + + +class ImportableMixin(models.Model): + source = models.CharField(max_length=50, choices=SourceEnum.choices, default=SourceEnum.CREATED, db_index=True, editable=False) + + class Meta: + abstract = True + + +class ReviewStatus(models.TextChoices): + IN_PROGRESS = 'in-progress', _('In progress') + READY_FOR_REVIEW = 'ready-for-review', _('Ready for review') + NEEDS_IMPROVEMENT = 'needs-improvement', _('Needs improvement') + FINISHED = 'finished', _('Finished') + + +class Language(models.TextChoices): + ENGLISH = 'en-US', 'English' + GERMAN = 'de-DE', 'German' + + +class LanguageMixin(models.Model): + language = models.CharField(choices=Language.choices, default=Language.GERMAN, max_length=5, db_index=True) + + class Meta: + abstract = True + diff --git a/api/src/reportcreator_api/pentests/models/files.py b/api/src/reportcreator_api/pentests/models/files.py new file mode 100644 index 0000000..fc1d829 --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/files.py @@ -0,0 +1,55 @@ +import hashlib +from django.db import models +from reportcreator_api.archive.crypto.fields import EncryptedField +from reportcreator_api.pentests import querysets, storages +from reportcreator_api.users.models import PentestUser + +from reportcreator_api.utils.models import BaseModel + + +class UploadedFileBase(BaseModel): + file = models.FileField() + name = EncryptedField(base_field=models.CharField(max_length=255)) + name_hash = models.BinaryField(max_length=32, db_index=True) + uploaded_by = models.ForeignKey(to=PentestUser, on_delete=models.SET_NULL, null=True) + + class Meta(BaseModel.Meta): + abstract = True + unique_together = [['linked_object', 'name_hash']] + + @classmethod + def hash_name(cls, name) -> bytes: + return hashlib.sha3_256(name.encode()).digest() + + def save(self, *args, **kwargs): + self.name_hash = self.hash_name(self.name) + return super().save(*args, **kwargs) + + +class UploadedImage(UploadedFileBase): + file = 
models.ImageField(storage=storages.get_uploaded_image_storage) + linked_object = models.ForeignKey(to='PentestProject', on_delete=models.CASCADE, related_name='images') + + objects = querysets.UploadedImageManager() + + +class UploadedAsset(UploadedFileBase): + file = models.FileField(storage=storages.get_uploaded_asset_storage) + linked_object = models.ForeignKey(to='ProjectType', on_delete=models.CASCADE, related_name='assets') + + objects = querysets.UploadedAssetManager() + + +class UploadedUserNotebookImage(UploadedFileBase): + file = models.ImageField(storage=storages.get_uploaded_image_storage) + linked_object = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, related_name='images') + + objects = querysets.UploadedUserNotebookImageManager() + + +class UploadedProjectFile(UploadedFileBase): + file = models.FileField(storage=storages.get_uploaded_file_storage) + linked_object = models.ForeignKey(to='PentestProject', on_delete=models.CASCADE, related_name='files') + + objects = querysets.UploadedProjectFileManager() + diff --git a/api/src/reportcreator_api/pentests/models/notes.py b/api/src/reportcreator_api/pentests/models/notes.py new file mode 100644 index 0000000..e4f6fc6 --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/notes.py @@ -0,0 +1,45 @@ +from uuid import uuid4 +from django.db import models + +from reportcreator_api.archive.crypto.fields import EncryptedField +from reportcreator_api.pentests.models.common import LockableMixin +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.pentests import querysets + + +class NotebookPage(LockableMixin, BaseModel): + note_id = models.UUIDField(default=uuid4, db_index=True, editable=False) + title = EncryptedField(base_field=models.TextField(default='')) + text = EncryptedField(base_field=models.TextField(default='')) + checked = models.BooleanField(null=True, blank=True) + icon_emoji = models.CharField(max_length=32, null=True, blank=True) + status_emoji = models.CharField(max_length=32, null=True, blank=True) + + parent = models.ForeignKey(to='pentests.NotebookPage', on_delete=models.CASCADE, null=True, blank=True) + order = models.PositiveIntegerField() + + project = models.ForeignKey(to='PentestProject', on_delete=models.CASCADE, null=True, related_name='notes') + user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, null=True, related_name='notes') + + objects = querysets.NotebookPageManager() + + class Meta: + constraints = [ + models.CheckConstraint( + name='has_project_or_user', + check=(models.Q(project__isnull=False) & models.Q(user__isnull=True)) | + (models.Q(project__isnull=True) & models.Q(user__isnull=False)) + ), + models.UniqueConstraint( + name='unique_note_id_per_project', + fields=['project', 'note_id'], + condition=models.Q(project__isnull=False) + ), + models.UniqueConstraint( + name='unique_note_id_per_user', + fields=['user', 'note_id'], + condition=models.Q(user__isnull=False) + ), + ] + diff --git a/api/src/reportcreator_api/pentests/models/project.py b/api/src/reportcreator_api/pentests/models/project.py new file mode 100644 index 0000000..e78a758 --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/project.py @@ -0,0 +1,307 @@ +import itertools +from uuid import uuid4 +from jsonschema import ValidationError +from django.db import models +from django.contrib.postgres.fields import ArrayField +from django.core.serializers.json import DjangoJSONEncoder +from django.utils.translation 
import gettext_lazy as _ + +from reportcreator_api.archive.crypto.fields import EncryptedField +from reportcreator_api.pentests.customfields.mixins import EncryptedCustomFieldsMixin +from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_CORE, FINDING_FIELDS_PREDEFINED, REPORT_FIELDS_CORE, REPORT_FIELDS_PREDEFINED, finding_field_order_default, finding_fields_default, report_fields_default, report_sections_default +from reportcreator_api.pentests.customfields.types import FieldDefinition, field_definition_to_dict, parse_field_definition +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure, set_field_origin +from reportcreator_api.pentests.customfields.validators import FieldDefinitionValidator, SectionDefinitionValidator +from reportcreator_api.pentests.models.common import ImportableMixin, LanguageMixin, LockableMixin, ReviewStatus +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.decorators import cache +from reportcreator_api.utils.error_messages import ErrorMessage +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.pentests import querysets +from reportcreator_api.pentests import cvss as cvss_utils +from reportcreator_api.utils.utils import remove_duplicates + + +class ProjectTypeScope(models.TextChoices): + GLOBAL = 'global', _('Global') + PRIVATE = 'private', _('Private') + PROJECT = 'project', _('Project') + + +class ProjectType(LockableMixin, LanguageMixin, ImportableMixin, BaseModel): + name = models.CharField(max_length=255, null=False, blank=False, db_index=True) + + # PDF Template + report_template = EncryptedField(base_field=models.TextField(default='')) + report_styles = EncryptedField(base_field=models.TextField(default='')) + report_preview_data = EncryptedField(base_field=models.JSONField(encoder=DjangoJSONEncoder, default=dict)) + + # Report + report_fields = models.JSONField( + encoder=DjangoJSONEncoder, + validators=[FieldDefinitionValidator(core_fields=REPORT_FIELDS_CORE, predefined_fields=REPORT_FIELDS_PREDEFINED)], + default=report_fields_default) + report_sections = models.JSONField(encoder=DjangoJSONEncoder, validators=[SectionDefinitionValidator()], default=report_sections_default) + + # Findings + finding_fields = models.JSONField( + encoder=DjangoJSONEncoder, + validators=[FieldDefinitionValidator(core_fields=FINDING_FIELDS_CORE, predefined_fields=FINDING_FIELDS_PREDEFINED)], + default=finding_fields_default) + finding_field_order = models.JSONField(encoder=DjangoJSONEncoder, default=finding_field_order_default) + + linked_project = models.ForeignKey(to='PentestProject', on_delete=models.SET_NULL, null=True, blank=True) + linked_user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, null=True, blank=True) + + objects = querysets.ProjectTypeManager() + + class Meta: + constraints = [ + models.CheckConstraint( + name='linked_project_or_user', + check=models.Q(linked_project=None) | models.Q(linked_user=None) + ), + ] + + @property + def finding_fields_obj(self) -> dict[str, FieldDefinition]: + return parse_field_definition(self.finding_fields) + + @property + def report_fields_obj(self) -> dict[str, FieldDefinition]: + return parse_field_definition(self.report_fields) + + @property + def scope(self) -> ProjectTypeScope: + if self.linked_project_id: + return ProjectTypeScope.PROJECT + elif self.linked_user_id: + return ProjectTypeScope.PRIVATE + elif not self.linked_project_id and not 
self.linked_user_id: + return ProjectTypeScope.GLOBAL + + def __str__(self) -> str: + return self.name + + def clean(self) -> None: + # Validate report sections contain only defined fields + if undefined_fields := set(itertools.chain(*map(lambda s: s['fields'], self.report_sections))) - set(self.report_fields.keys()): + raise ValidationError(_('Unknown fields in section: %(fields)s') % {'fields': list(undefined_fields)}) + + # Validate finding field order contains only defined fields + if undefined_fields := set(self.finding_field_order) - set(self.finding_fields.keys()): + raise ValidationError(_('Unknown fields in finding order: %(fields)s') % {'fields': list(undefined_fields)}) + + def save(self, *args, **kwargs): + # Ensure static fields are marked correctly + self.report_fields = field_definition_to_dict(set_field_origin(self.report_fields_obj, predefined_fields=REPORT_FIELDS_CORE | REPORT_FIELDS_PREDEFINED)) + self.finding_fields = field_definition_to_dict(set_field_origin(self.finding_fields_obj, predefined_fields=FINDING_FIELDS_CORE | FINDING_FIELDS_PREDEFINED)) + + # Ensure report section definition contains all fields + section_fields = set() + for s in self.report_sections: + s['fields'] = remove_duplicates(s['fields']) + section_fields |= set(s['fields']) + report_fields = set(self.report_fields.keys()) + if missing_fields := list(report_fields - section_fields): + others_section = [s for s in self.report_sections if s['id'] == 'other'] + if others_section: + others_section = others_section[0] + else: + others_section = { + 'id': 'other', + 'label': 'Other', + 'fields': [], + } + self.report_sections.append(others_section) + others_section['fields'].extend(missing_fields) + # Remove undefined fields from section definition + for section in self.report_sections: + for undefined_field in set(section['fields']) - report_fields: + section['fields'].remove(undefined_field) + + # Ensure finding order contains all fields + finding_fields = set(self.finding_fields.keys()) + self.finding_field_order = remove_duplicates(self.finding_field_order + list(finding_fields)) + # Remove undefined fields from finding order + for undefined_field in set(self.finding_field_order) - finding_fields: + self.finding_field_order.remove(undefined_field) + + # Ensure correct structure of report_preview_data + if set(self.changed_fields).intersection({'report_preview_data', 'report_fields', 'finding_fields'}): + report_data = self.report_preview_data.get('report') + if not isinstance(report_data, dict): + report_data = {} + + self.report_preview_data['report'] = report_data | ensure_defined_structure( + value=report_data, + definition=self.report_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEMO_DATA) + findings = self.report_preview_data.get('findings') + if not isinstance(findings, list): + # Generate findings with demo data + # Static values for core fields + findings = [ + {'title': 'First Demo Finding', 'cvss': 'CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:U/C:H/I:H/A:L'}, + {'title': 'Second Demo Finding', 'cvss': 'CVSS:3.1/AV:N/AC:L/PR:H/UI:N/S:U/C:L/I:L/A:L'}, + ] + self.report_preview_data['findings'] = [ + {'id': str(uuid4())} | f | ensure_defined_structure( + value=f, + definition=self.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEMO_DATA) + for f in findings if isinstance(f, dict) + ] + + return super().save(*args, **kwargs) + + def copy(self, **kwargs): + return ProjectType.objects.copy(self, **kwargs) + + +class PentestProject(EncryptedCustomFieldsMixin, 
LanguageMixin, ImportableMixin, BaseModel): + name = models.CharField(max_length=255, null=False, blank=False, db_index=True) + project_type = models.ForeignKey(to='ProjectType', on_delete=models.PROTECT) + imported_members = ArrayField(base_field=models.JSONField(encoder=DjangoJSONEncoder), default=list, blank=True) + + readonly = models.BooleanField(default=False, db_index=True) + + objects = querysets.PentestProjectManager() + + @property + def field_definition(self) -> dict[str, FieldDefinition]: + return self.project_type.report_fields_obj + + def __str__(self) -> str: + return self.name + + def delete(self, using=None, keep_parents=False): + linked_project_types_to_delete = list(ProjectType.objects \ + .filter(linked_project=self) \ + .annotate(used_by_other=models.Count('pentestproject', filter=~models.Q(pentestproject=models.F('linked_project')))) \ + .filter(used_by_other=0) \ + .values_list('id', flat=True)) + out = super().delete(using=using, keep_parents=keep_parents) + ProjectType.objects.filter(id__in=linked_project_types_to_delete).delete() + return out + + def copy(self, **kwargs): + return PentestProject.objects.copy(self, **kwargs) + + def perform_checks(self) -> list[ErrorMessage]: + from reportcreator_api.pentests.checks import run_checks + return list(run_checks(self)) + + +class ProjectMemberRole(BaseModel): + role = models.CharField(max_length=50, unique=True) + default = models.BooleanField(default=False) + + @classmethod + @property + @cache('ProjectMemberRole.predefined_roles', timeout=10) + def predefined_roles(cls): + return ProjectMemberRole.objects.all() + + @classmethod + @property + def default_roles(cls) -> list[str]: + return [r.role for r in cls.predefined_roles if r.default] + + +class ProjectMemberInfo(BaseModel): + project = models.ForeignKey(PentestProject, on_delete=models.CASCADE, related_name='members') + user = models.ForeignKey(PentestUser, on_delete=models.CASCADE) + + roles = ArrayField(base_field=models.CharField(max_length=50, null=False, blank=False), default=list, blank=True) + + class Meta: + unique_together = [('project', 'user')] + + +class ReportSection(LockableMixin, BaseModel): + """ + This model stores section-related meta information in the DB. + The actual section data is stored in project.custom_fields. + Storing everything in the ReportSection DB model would require complex manipulation of model data when section definitions change + (e.g. field moved to another section, field deleted from definition => keep value of section, etc.). + But storing per-section information (e.g. locking, status tracking and assigning pentesters) is easier on a DB object. + Therefore some parts are stored in a separate model (everything that is not influenced by section and field definition), + but the data still remains in the project.data JSON object. 
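+ For example, given a (simplified, illustrative) section definition {'id': 'other', 'label': 'Other', 'fields': ['title']}, the corresponding ReportSection only exposes the 'title' key of project.data through its `data` property and delegates writes via update_data() back to the project, while assignee, status and locking live on this ReportSection row.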
+ """ + project = models.ForeignKey(to=PentestProject, on_delete=models.CASCADE, null=False, related_name='sections') + section_id = models.CharField(max_length=255, null=False, db_index=True, editable=False) + + assignee = models.ForeignKey(to=PentestUser, on_delete=models.SET_NULL, null=True, blank=True) + status = models.CharField(max_length=20, choices=ReviewStatus.choices, default=ReviewStatus.IN_PROGRESS, db_index=True) + + objects = models.Manager.from_queryset(querysets.ReportSectionQueryset)() + + class Meta(BaseModel.Meta): + unique_together = [('project', 'section_id')] + + @property + def project_type(self): + return self.project.project_type + + @property + def section_definition(self): + return next(filter(lambda s: s.get('id') == self.section_id, self.project_type.report_sections), {}) + + @property + def section_label(self): + return self.section_definition.get('label') or '' + + @property + def section_fields(self): + return self.section_definition.get('fields', []) + + @property + def field_definition(self) -> dict[str, FieldDefinition]: + return {f: self.project_type.report_fields_obj.get(f) for f in self.section_fields} + + @property + def data(self): + report_data = self.project.data + return {f: report_data.get(f) for f in self.section_fields} + + @property + def language(self): + return self.project.language + + def update_data(self, value): + self.project.update_data(value) + + +class PentestFinding(EncryptedCustomFieldsMixin, LockableMixin, BaseModel): + finding_id = models.UUIDField(default=uuid4, db_index=True, editable=False) + project = models.ForeignKey(to=PentestProject, on_delete=models.CASCADE, null=False, related_name='findings') + + template_id = EncryptedField(base_field=models.UUIDField(null=True, blank=True), null=True, blank=True) + assignee = models.ForeignKey(to=PentestUser, on_delete=models.SET_NULL, null=True, blank=True) + status = models.CharField(max_length=20, choices=ReviewStatus.choices, default=ReviewStatus.IN_PROGRESS, db_index=True) + + objects = models.Manager.from_queryset(querysets.PentestFindingQueryset)() + + class Meta(BaseModel.Meta): + unique_together = [('project', 'finding_id')] + + @property + def field_definition(self) -> dict[str, FieldDefinition]: + return self.project.project_type.finding_fields_obj + + @property + def language(self): + return self.project.language + + @property + def title(self): + return self.data.get('title') + + @property + def risk_score(self): + return cvss_utils.calculate_score(self.data.get('cvss')) + + def __str__(self) -> str: + return self.title + diff --git a/api/src/reportcreator_api/pentests/models/template.py b/api/src/reportcreator_api/pentests/models/template.py new file mode 100644 index 0000000..fb4bd95 --- /dev/null +++ b/api/src/reportcreator_api/pentests/models/template.py @@ -0,0 +1,48 @@ +from django.db import models +from django.contrib.postgres.fields import ArrayField + +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.pentests.customfields.mixins import CustomFieldsMixin +from reportcreator_api.pentests.models.common import LockableMixin, ImportableMixin, ReviewStatus, LanguageMixin +from reportcreator_api.pentests import querysets +from reportcreator_api.pentests import cvss as cvss_utils +from reportcreator_api.pentests.customfields.types import FieldDefinition +from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_CORE, FINDING_FIELDS_PREDEFINED +from reportcreator_api.utils.decorators import cache + + +class 
FindingTemplate(CustomFieldsMixin, LockableMixin, LanguageMixin, ImportableMixin, BaseModel): + usage_count = models.PositiveIntegerField(default=0, db_index=True) + tags = ArrayField( + base_field=models.CharField(max_length=255), + default=list, db_index=True) + status = models.CharField(max_length=20, choices=ReviewStatus.choices, default=ReviewStatus.IN_PROGRESS, db_index=True) + + title = models.TextField(default='', db_index=True) + + cvss = models.CharField(max_length=255, default='n/a') + risk_score = models.FloatField(default=0.0, db_index=True) + risk_level = models.CharField(max_length=10, choices=cvss_utils.CVSSLevel.choices, default=cvss_utils.CVSSLevel.INFO, db_index=True) + + objects = models.Manager.from_queryset(querysets.FindingTemplateQueryset)() + + @classmethod + @property + @cache('FindingTemplate.field_definition', timeout=10) + def field_definition(cls) -> dict[str, FieldDefinition]: + return FindingTemplate.objects.get_field_definition() + + @property + def core_field_names(self) -> list[str]: + return list(FINDING_FIELDS_CORE.keys()) + + def __str__(self) -> str: + return self.title + + def save(self, *args, **kwargs): + # Update risk score and level + self.risk_score = cvss_utils.calculate_score(self.cvss) + self.risk_level = cvss_utils.level_from_score(self.risk_score) + + return super().save(*args, **kwargs) + diff --git a/api/src/reportcreator_api/pentests/permissions.py b/api/src/reportcreator_api/pentests/permissions.py new file mode 100644 index 0000000..5bc6c6d --- /dev/null +++ b/api/src/reportcreator_api/pentests/permissions.py @@ -0,0 +1,112 @@ +from django.conf import settings +from rest_framework import permissions + +from reportcreator_api.utils import license +from reportcreator_api.pentests.models import ProjectTypeScope, SourceEnum + + +class IsAuthenticatedOrRetrieve(permissions.IsAuthenticated): + def has_permission(self, request, view): + if view.action and view.action.startswith('retrieve'): + return True + return super().has_permission(request, view) + + +class ProjectTypePermissions(permissions.BasePermission): + # Read-only actions that can be performed by anyone having read permissions + # and actions for private designs (permission check done in serializer to allow only private designs) + public_actions = ['preview', 'export', 'copy', 'create'] + # Write actions on a ProjectType instance + private_actions = ['update', 'partial_update', 'destroy', 'lock', 'unlock'] + # All other actions are only accessible for users with designer permissions + + def has_permission(self, request, view): + if request.user.is_admin or request.user.is_designer: + return True + if request.method in permissions.SAFE_METHODS or view.action in self.public_actions + self.private_actions: + return True + return False + + def has_object_permission(self, request, view, obj): + if request.user.is_admin: + return True + if obj.scope == ProjectTypeScope.GLOBAL: + return request.method in permissions.SAFE_METHODS or view.action in self.public_actions or \ + request.user.is_designer + elif obj.scope == ProjectTypeScope.PRIVATE: + return obj.linked_user == request.user and settings.ENABLE_PRIVATE_DESIGNS + elif obj.scope == ProjectTypeScope.PROJECT: + if request.user not in set(map(lambda m: m.user, obj.linked_project.members.all())): + return False + return request.method in permissions.SAFE_METHODS or view.action in self.public_actions or \ + (view.action in self.private_actions and obj.source == SourceEnum.CUSTOMIZED) or \ + (request.user.is_designer and obj.source == 
SourceEnum.CUSTOMIZED) + return False + + +class ProjectTypeSubresourcePermissions(permissions.BasePermission): + def has_permission(self, request, view): + if request.user.is_admin: + return True + project_type = view.get_project_type() + if project_type.scope == ProjectTypeScope.GLOBAL: + return request.method in permissions.SAFE_METHODS or \ + request.user.is_designer + elif project_type.scope == ProjectTypeScope.PRIVATE: + return project_type.linked_user == request.user and settings.ENABLE_PRIVATE_DESIGNS + elif project_type.scope == ProjectTypeScope.PROJECT: + if request.user not in set(map(lambda m: m.user, project_type.linked_project.members.all())): + return False + return request.method in permissions.SAFE_METHODS or \ + project_type.source in [SourceEnum.CUSTOMIZED] + return False + + +class IsTemplateEditorOrReadOnly(permissions.BasePermission): + def has_permission(self, request, view): + return request.method in permissions.SAFE_METHODS or request.user.is_admin or request.user.is_template_editor + + +class ProjectPermissions(permissions.BasePermission): + def has_permission(self, request, view): + if request.user.is_guest: + if not settings.GUEST_USERS_CAN_CREATE_PROJECTS and view.action in ['create', 'copy', 'import_']: + return False + elif not settings.GUEST_USERS_CAN_IMPORT_PROJECTS and view.action in ['import_']: + return False + elif not settings.GUEST_USERS_CAN_DELETE_PROJECTS and view.action in ['destroy']: + return False + elif not settings.GUEST_USERS_CAN_UPDATE_PROJECT_SETTINGS and view.action in ['update', 'partial_update', 'readonly', 'customize_projecttype', 'archive', 'archive_check']: + return False + + if view.action in ['archive_check', 'archive'] and not license.ProfessionalLicenseRequired().has_permission(request, view): + return False + return True + + def has_object_permission(self, request, view, obj): + if request.method in permissions.SAFE_METHODS or view.action in ['check', 'preview', 'generate', 'copy', 'export', 'export_all', 'readonly', 'archive', 'archive_check']: + return True + return not obj.readonly + + +class ProjectSubresourcePermissions(permissions.BasePermission): + def has_permission(self, request, view): + if request.method in permissions.SAFE_METHODS: + return True + return not view.get_project().readonly + + +class UserPublicKeyPermissions(permissions.BasePermission): + def has_permission(self, request, view): + if view.kwargs.get('pentestuser_pk') == 'self': + return True + if request.user.is_admin or request.user.is_user_manager: + return request.method in permissions.SAFE_METHODS + return False + + +class ArchivedProjectKeyPartPermissions(permissions.BasePermission): + def has_object_permission(self, request, view, obj): + if view.action in ['public_key_encrypted_data', 'decrypt']: + return request.user.is_admin or request.user == obj.user + return True diff --git a/api/src/reportcreator_api/pentests/querysets.py b/api/src/reportcreator_api/pentests/querysets.py new file mode 100644 index 0000000..3cf346a --- /dev/null +++ b/api/src/reportcreator_api/pentests/querysets.py @@ -0,0 +1,454 @@ +import functools +import io +import json +import operator +import random +import copy +import secrets +import tempfile +import uuid +from base64 import b64encode, b64decode +from django.conf import settings +from django.db import models, transaction +from django.db.models.functions import Coalesce +from django.core.exceptions import ValidationError +from reportcreator_api.archive import crypto +from reportcreator_api.archive.crypto.base import 
ReadIntoAdapter +from reportcreator_api.archive.crypto.secret_sharing import ShamirLarge +from reportcreator_api.archive.crypto.storage import EncryptedFileAdapter, IterableToFileAdapter + +from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_CORE, FINDING_FIELDS_PREDEFINED +from reportcreator_api.pentests.customfields.types import FieldOrigin, parse_field_definition +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.files import normalize_filename +from reportcreator_api.utils.utils import groupby_to_dict, omit_keys +from reportcreator_api.archive.crypto import pgp + + +class ProjectTypeQueryset(models.QuerySet): + def only_permitted(self, user): + if user.is_admin: + return self + pt_filters = models.Q(models.Q(linked_project=None) & models.Q(linked_user=None)) | \ + models.Q(linked_project__members__user=user) + if settings.ENABLE_PRIVATE_DESIGNS: + pt_filters |= models.Q(linked_user=user) + return self.filter(pt_filters) + + def custom_finding_field_definitions(self): + """ + Return all custom field definitions over all globally visible ProjectTypes. + Handle conflicting data types of custom fields by using the field of the first ProjectType + e.g. ProjectType1 defines custom_field: string; ProjectType2 defines custom_field: list[string] => use custom_field: string + """ + all_finding_field_definitions = self \ + .filter(linked_project=None) \ + .order_by('-created', 'id') \ + .values_list('finding_fields', flat=True) + return parse_field_definition(functools.reduce( + operator.or_, + map(lambda fd: dict(filter(lambda t: t[1].get('origin') == FieldOrigin.CUSTOM.value, fd.items())), all_finding_field_definitions), + {} + )) + + +class ProjectTypeManager(models.Manager.from_queryset(ProjectTypeQueryset)): + use_in_migrations = True + + @transaction.atomic() + def copy(self, instance, **kwargs): + from reportcreator_api.pentests.models import UploadedAsset + + assets = list(instance.assets.all()) + + # Copy model + instance = copy.copy(instance) + for k, v in (kwargs or {}).items(): + setattr(instance, k, v) + instance.pk = None + instance.lock_info_data = None + instance.save() + + # Copy all assets + for a in assets: + a.pk = None + a.linked_object = instance + UploadedAsset.objects.bulk_create(assets) + + return instance + + +class PentestProjectQueryset(models.QuerySet): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(members__user=user) + + +class PentestProjectManager(models.Manager.from_queryset(PentestProjectQueryset)): + @transaction.atomic() + def copy(self, instance, **kwargs): + from reportcreator_api.pentests.models import PentestFinding, ReportSection, NotebookPage, UploadedImage, UploadedProjectFile, SourceEnum, ProjectMemberInfo + + findings = list(instance.findings.all()) + sections = list(instance.sections.all()) + notes = list(instance.notes.select_related('parent').all()) + members = list(instance.members.all()) + images = list(instance.images.all()) + files = list(instance.files.all()) + + # Copy project + instance = copy.copy(instance) + for k, v in (kwargs or {}).items(): + setattr(instance, k, v) + instance.pk = None + instance.readonly = False + instance.lock_info_data = None + instance.project_type = instance.project_type.copy( + linked_user=None, + source=SourceEnum.SNAPSHOT if instance.project_type.source not in [SourceEnum.IMPORTED_DEPENDENCY, SourceEnum.CUSTOMIZED] else instance.project_type.source) + instance.save() + 
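+ # Link the copied design snapshot back to the new project copy; the copy must be saved first so it has a primary key to reference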
instance.project_type.linked_project = instance + instance.project_type.save(update_fields=['linked_project']) + + for mi in members: + mi.pk = None + mi.project = instance + ProjectMemberInfo.objects.bulk_create(members) + + # Copy sections + ReportSection.objects.filter(project=instance).delete() + for s in sections: + s.pk = None + s.project = instance + ReportSection.objects.bulk_create(sections) + + # Copy findings + for f in findings: + f.pk = None + f.project = instance + PentestFinding.objects.bulk_create(findings) + + # Copy notes + for n in notes: + n.pk = None + n.project = instance + NotebookPage.objects.bulk_create(notes) + # Update parent to copied model + for n in notes: + if n.parent: + n.parent = next(filter(lambda pn: pn.note_id == n.parent.note_id, notes), None) + NotebookPage.objects.bulk_update(notes, ['parent']) + + # Copy images + for i in images: + i.pk = None + i.linked_object = instance + UploadedImage.objects.bulk_create(images) + + # Copy files + for f in files: + f.pk = None + f.linked_object = instance + UploadedProjectFile.objects.bulk_create(files) + + return instance + + @transaction.atomic + def set_members(self, instance, members): + from reportcreator_api.pentests.models import ProjectMemberInfo + + if members is None: + return + + for m in members: + m.pk = None + m.project = instance + m.roles = list(set(m.roles)) + + members_map = dict(map(lambda m: (m.user_id, m), members)) + existing_members_map = dict(map(lambda m: (m.user_id, m), instance.members.all())) + + if new_members := omit_keys(members_map, existing_members_map.keys()).values(): + ProjectMemberInfo.objects.bulk_create(new_members) + if removed_members := omit_keys(existing_members_map, members_map.keys()).values(): + ProjectMemberInfo.objects.filter(id__in=[m.pk for m in removed_members]).delete() + + updated_members = [] + for k, m in existing_members_map.items(): + if k in members_map and set(m.roles) != set(members_map[k].roles): + m.roles = members_map[k].roles + updated_members.append(m) + if updated_members: + ProjectMemberInfo.objects.bulk_update(updated_members, ['roles']) + + def add_member(self, user, projects): + from reportcreator_api.pentests.models import ProjectMemberInfo + + existing_members = set(ProjectMemberInfo.objects \ + .filter(project__in=projects) \ + .filter(user=user) \ + .values_list('project_id', flat=True)) + new_members = [ProjectMemberInfo(user=user, project=p) for p in projects if p.id not in existing_members] + ProjectMemberInfo.objects.bulk_create(new_members) + + +class PentestFindingQueryset(models.QuerySet): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(project__members__user=user) + + +class ReportSectionQueryset(models.QuerySet): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(project__members__user=user) + + +class FindingTemplateQueryset(models.QuerySet): + def increment_usage_count(self, by=1): + return self.update(usage_count=models.F('usage_count') + models.Value(by)) + + def get_field_definition(self): + from reportcreator_api.pentests.models import ProjectType + return FINDING_FIELDS_CORE | FINDING_FIELDS_PREDEFINED | \ + ProjectType.objects.custom_finding_field_definitions() | \ + FINDING_FIELDS_PREDEFINED | FINDING_FIELDS_CORE + + +class NotebookPageQuerySet(models.QuerySet): + pass + + +class NotebookPageManager(models.Manager.from_queryset(NotebookPageQuerySet)): + def create(self, project=None, user=None, order=None, parent=None, **kwargs): + from 
reportcreator_api.pentests.models import NotebookPage + + if not order and (project or user): + if project: + order_qs = NotebookPage.objects.filter(project=project) + elif user: + order_qs = NotebookPage.objects.filter(user=user) + order = Coalesce( + models.Subquery( + order_qs + .filter(parent=parent) + .values('parent') + .annotate(max_order=models.Max('order')) + .values_list('max_order')), + models.Value(0) + ) + models.Value(1) + + obj = super().create(project=project, user=user, parent=parent, order=order, **kwargs) + obj.refresh_from_db() + return obj + + def check_parent_and_order(self, instances, missing_instances=None): + # * Update order values: first all notes in data, then missing notes (keep order of missing notes, but move to end) + # * and validate no circular dependencies: beginning from the tree root, every note must be in the tree. + # If it does not have a path from root to node, there is a circular dependency. + missing_instances = missing_instances or [] + parent_dict = groupby_to_dict(instances, key=lambda n: n.parent_id or uuid.UUID(int=0)) + in_tree = set() + def to_tree(parent_id): + layer = parent_dict.get(parent_id, []) + layer_sorted = sorted(filter(lambda n: n not in missing_instances, layer), key=lambda n: n.order) + \ + sorted(filter(lambda n: n in missing_instances, layer), key=lambda n: n.order) + for idx, n in enumerate(layer_sorted): + n.order = idx + 1 + in_tree.add(n) + to_tree(n.id) + to_tree(uuid.UUID(int=0)) + if len(in_tree) != len(instances): + raise ValidationError('Circular parent relationships detected') + + +class UserPublicKeyQuerySet(models.QuerySet): + def only_enabled(self): + return self.filter(enabled=True) + + +class UserPublicKeyManager(models.Manager.from_queryset(UserPublicKeyQuerySet)): + def create(self, public_key=None, public_key_info=None, **kwargs): + if not public_key_info and public_key: + public_key_info = pgp.public_key_info(public_key) + return super().create(public_key=public_key, public_key_info=public_key_info, **kwargs) + + +class ArchivedProjectQuerySet(models.QuerySet): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(key_parts__user=user) + + +class ArchivedProjectManager(models.Manager.from_queryset(ArchivedProjectQuerySet)): + def get_possible_archive_users_for_project(self, project): + from reportcreator_api.pentests.models import UserPublicKey + return PentestUser.objects \ + .filter(models.Q(is_global_archiver=True) | models.Q(pk__in=project.members.values_list('user_id'))) \ + .prefetch_related(models.Prefetch('public_keys', UserPublicKey.objects.only_enabled())) + + def get_archive_users_for_project(self, project): + return self.get_possible_archive_users_for_project(project) \ + .only_active() \ + .only_with_public_keys() \ + + + @transaction.atomic() + def create_from_project(self, project, name=None, users=None, delete_project=True): + from reportcreator_api.pentests.models import ArchivedProject, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart + + name = name or project.name + users = list(users or self.get_archive_users_for_project(project)) + if len(users) < settings.ARCHIVING_THRESHOLD: + raise ValueError('Too few users') + + archive = ArchivedProject( + name=name or project.name, + threshold=settings.ARCHIVING_THRESHOLD, + ) + key_parts_to_create = [] + encrypted_key_parts_to_create = [] + + # Create a random AES-256 key for encrypting the whole archive + aes_key = secrets.token_bytes(32) + # Split the AES key using shamir secret sharing and 
distribute key parts to users + shamir_key_parts = ShamirLarge.split_large(k=archive.threshold, n=len(users), secret=aes_key) + for user, (shamir_key_id, shamir_key) in zip(users, shamir_key_parts): + # Encrypt the per-user shamir key with a per-user AES key + # This is mainly used for integrity protection to detect corrupted/user-forged shamir key parts. + # This additional encryption layer makes it possible to use public-key encryption schemes + # other than PGP (which uses its own file encryption layer on top of public keys) in the future. + user_aes_key = secrets.token_bytes(32) + shamir_key_part_data_io = io.BytesIO() + with crypto.open(shamir_key_part_data_io, mode='wb', key=crypto.EncryptionKey(id=None, key=user_aes_key)) as c: + c.write(json.dumps({'key_id': shamir_key_id, 'key': b64encode(shamir_key).decode()}).encode()) + + key_part_model = ArchivedProjectKeyPart( + archived_project=archive, + user=user, + encrypted_key_part=shamir_key_part_data_io.getvalue() + ) + key_parts_to_create.append(key_part_model) + + # Encrypt the per-user AES key with each user's public key + user_public_keys = [pk for pk in user.public_keys.all() if pk.enabled] + if not user_public_keys: + raise ValueError('User does not have any usable public key') + for public_key in user_public_keys: + encrypted_key_parts_to_create.append(ArchivedProjectPublicKeyEncryptedKeyPart( + key_part=key_part_model, + public_key=public_key, + encrypted_data=public_key.encrypt(data=b64encode(user_aes_key) + b'\n') + )) + + # Export the archive, encrypt it with the AES-256 key and upload it to storage + from reportcreator_api.archive.import_export import export_projects + archive.file = EncryptedFileAdapter( + file=IterableToFileAdapter(export_projects([project], export_all=True), name=str(uuid.uuid4())), + key=crypto.EncryptionKey(id=None, key=aes_key) + ) + + # Create models in DB + archive.save() + ArchivedProjectKeyPart.objects.bulk_create(key_parts_to_create) + ArchivedProjectPublicKeyEncryptedKeyPart.objects.bulk_create(encrypted_key_parts_to_create) + + # Delete project + if delete_project: + project.delete() + + return archive + + @transaction.atomic() + def restore_project(self, archive): + from reportcreator_api.pentests.models import PentestProject + from reportcreator_api.archive.import_export.import_export import import_projects + + # Combine key parts with shamir secret sharing to decrypt the archive key + key_parts = list(filter(lambda k: k.is_decrypted, archive.key_parts.all())) + if len(key_parts) < archive.threshold: + raise ValueError('Too few key parts available') + archive_key = ShamirLarge.combine_large([(k.key_part['key_id'], b64decode(k.key_part['key'])) for k in key_parts]) + + # Decrypt archive and import project + with tempfile.SpooledTemporaryFile(max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode='w+b') as f: + with crypto.open(archive.file, mode='rb', key=crypto.EncryptionKey(id=None, key=archive_key)) as c: + while chunk := c.read(settings.FILE_UPLOAD_MAX_MEMORY_SIZE): + f.write(chunk) + f.seek(0) + projects = import_projects(f) + + # Add archivers as members (only relevant for global archivers) + for k in archive.key_parts.all(): + PentestProject.objects.add_member(k.user, projects) + + # Delete archive + archive.delete() + return projects[0] + + +class UploadedFileQueryset(models.QuerySet): + def filter_name(self, name): + from reportcreator_api.pentests.models import UploadedFileBase + return self.filter(name_hash=UploadedFileBase.hash_name(name)) + + +class UploadedImageQueryset(UploadedFileQueryset): + 
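+ # Non-admin users can only access images of projects they are a member of (linked_object is the owning PentestProject)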
def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(linked_object__members__user=user) + + +class UploadedUserNotebookImageQueryset(UploadedFileQueryset): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(linked_object=user) + + +class UploadedAssetQueryset(UploadedFileQueryset): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(models.Q(linked_object__linked_project=None) | models.Q(linked_object__linked_project__members__user=user)) + + +class UploadedProjectFileQueryset(UploadedFileQueryset): + def only_permitted(self, user): + if user.is_admin: + return self + return self.filter(linked_object__members__user=user) + + +class UploadedFileManagerMixin: + def create(self, file, linked_object, name=None, **kwargs): + # Change name when a file with the same name already exists + name = normalize_filename(name or file.name or 'file') + while self.filter(linked_object=linked_object).filter_name(name).exists(): + if (ext_idx := name.rfind('.')) and ext_idx != -1: + name = name[:ext_idx] + '-' + str(random.randint(1, 1000000)) + name[ext_idx:] + + # Randomize filename in storage to not leak information + return super().create(file=file, name=name, linked_object=linked_object, **kwargs) + + +class UploadedImageManager(UploadedFileManagerMixin, models.Manager.from_queryset(UploadedImageQueryset)): + pass + + +class UploadedAssetManager(UploadedFileManagerMixin, models.Manager.from_queryset(UploadedAssetQueryset)): + pass + + +class UploadedUserNotebookImageManager(UploadedFileManagerMixin, models.Manager.from_queryset(UploadedUserNotebookImageQueryset)): + pass + + +class UploadedProjectFileManager(UploadedFileManagerMixin, models.Manager.from_queryset(UploadedProjectFileQueryset)): + pass + diff --git a/api/src/reportcreator_api/pentests/serializers.py b/api/src/reportcreator_api/pentests/serializers.py new file mode 100644 index 0000000..b5e0f31 --- /dev/null +++ b/api/src/reportcreator_api/pentests/serializers.py @@ -0,0 +1,719 @@ +from base64 import b64decode +import functools +import io +import json +from django.conf import settings +from rest_framework import serializers, exceptions +from django.db import transaction +from django.db.models import F, Exists, OuterRef +from django.utils import timezone +from reportcreator_api.archive import crypto + +from reportcreator_api.archive.crypto import pgp, CryptoError +from reportcreator_api.pentests.customfields.serializers import serializer_from_definition +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, check_definitions_compatible, ensure_defined_structure +from reportcreator_api.pentests.models import FindingTemplate, LockInfo, NotebookPage, PentestFinding, PentestProject, ProjectType, ProjectTypeScope, \ + ReportSection, SourceEnum, \ + UploadedAsset, UploadedImage, ProjectMemberInfo, ProjectMemberRole, UploadedProjectFile, UploadedUserNotebookImage, \ + UserPublicKey, ArchivedProject, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart +from reportcreator_api.users.models import PentestUser +from reportcreator_api.users.serializers import PentestUserSerializer, RelatedUserSerializer +from reportcreator_api.utils.files import compress_image +from reportcreator_api.utils.utils import omit_items + + +class LockInfoSerializer(serializers.ModelSerializer): + user = PentestUserSerializer(read_only=True) + + def __init__(self, *args, **kwargs): + kwargs['read_only'] = True + 
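+ # Lock info is always serialized read-only: force read_only=True regardless of how the serializer is instantiated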
super().__init__(*args, **kwargs) + + class Meta: + model = LockInfo + fields = [ + 'created', 'updated', 'last_ping', 'expires', 'user', + ] + read_only_fields = ['last_ping'] + + +class ProjectTypeShortSerializer(serializers.ModelSerializer): + details = serializers.HyperlinkedIdentityField(view_name='projecttype-detail', read_only=True) + assets = serializers.HyperlinkedIdentityField(view_name='uploadedasset-list', lookup_url_kwarg='projecttype_pk', read_only=True) + + class Meta: + model = ProjectType + fields = [ + 'id', 'created', 'updated', 'source', 'scope', + 'name', 'language', + 'details', 'assets', + ] + + +class ProjectTypeDetailSerializer(ProjectTypeShortSerializer): + lock_info = LockInfoSerializer() + report_template = serializers.CharField(required=False, allow_blank=True) + report_styles = serializers.CharField(required=False, allow_blank=True) + report_preview_data = serializers.DictField(required=False) + + class Meta(ProjectTypeShortSerializer.Meta): + fields = ProjectTypeShortSerializer.Meta.fields + [ + 'lock_info', + 'report_template', 'report_styles', 'report_preview_data', + 'report_fields', 'report_sections', + 'finding_fields', 'finding_field_order', + ] + + +class ProjectTypeCreateSerializer(ProjectTypeDetailSerializer): + scope = serializers.ChoiceField(choices=[c for c in ProjectTypeScope.choices if c[0] in [ProjectTypeScope.GLOBAL.value, ProjectTypeScope.PRIVATE.value]]) + + def validate_scope(self, value): + if value == ProjectTypeScope.PRIVATE and not settings.ENABLE_PRIVATE_DESIGNS: + raise serializers.ValidationError(f'Scope "{value}" not supported') + elif value == ProjectTypeScope.GLOBAL and not (self.context['request'].user.is_admin or self.context['request'].user.is_designer): + raise exceptions.PermissionDenied() + return value + + def create(self, validated_data): + scope = validated_data.pop('scope') + validated_data |= { + ProjectTypeScope.GLOBAL: {'linked_project': None, 'linked_user': None}, + ProjectTypeScope.PRIVATE: {'linked_project': None, 'linked_user': self.context['request'].user}, + }[scope] + return super().create(validated_data) + + +class ProjectTypePreviewSerializer(serializers.ModelSerializer): + report_template = serializers.CharField(required=False, allow_blank=True) + report_styles = serializers.CharField(required=False, allow_blank=True) + report_preview_data = serializers.DictField(required=False) + + class Meta: + model = ProjectType + fields = ['report_template', 'report_styles', 'report_preview_data'] + + +class ProjectTypeRelatedField(serializers.PrimaryKeyRelatedField): + def get_queryset(self): + return ProjectType.objects.only_permitted(self.context['request'].user) + + +class PentestFindingSerializer(serializers.ModelSerializer): + id = serializers.UUIDField(source='finding_id', read_only=True) + project = serializers.PrimaryKeyRelatedField(read_only=True) + project_type = ProjectTypeRelatedField(source='project.project_type_id', read_only=True) + lock_info = LockInfoSerializer(read_only=True) + template = serializers.PrimaryKeyRelatedField(read_only=True, source='template_id') + assignee = RelatedUserSerializer(required=False, allow_null=True, default=serializers.CreateOnlyDefault(serializers.CurrentUserDefault())) + + class Meta: + model = PentestFinding + fields = [ + 'id', 'created', 'updated', 'project', 'project_type', + 'language', 'lock_info', 'template', 'assignee', 'status', + ] + + def get_fields(self): + return super().get_fields() | { + 'data': 
serializer_from_definition(definition=self.context['project'].project_type.finding_fields_obj, **self.get_extra_kwargs().get('data', {})), + } + + def create(self, validated_data): + data = ensure_defined_structure( + value=validated_data.pop('data', {}), + definition=self.context['project'].project_type.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT + ) + instance = PentestFinding( + project=self.context['project'], + **validated_data + ) + instance.update_data(data) + instance.save() + return instance + + def update(self, instance, validated_data): + instance.update_data(validated_data.pop('data', {})) + return super().update(instance, validated_data) + + +class PentestFindingFromTemplateSerializer(PentestFindingSerializer): + template = serializers.PrimaryKeyRelatedField(queryset=FindingTemplate.objects.all(), required=True, allow_null=False, source='template_id') + + class Meta(PentestFindingSerializer.Meta): + read_only_fields = ['data'] + + @transaction.atomic() + def create(self, validated_data): + template = validated_data.pop('template_id') + finding = super().create(validated_data | { + 'template_id': template.id, + 'data': template.data | validated_data.pop('data', {}), + }) + FindingTemplate.objects.filter(id=template.id).increment_usage_count() + return finding + + +class ProjectMemberInfoSerializer(serializers.ModelSerializer): + class Meta: + model = ProjectMemberInfo + fields = ['roles'] + + def __init__(self, user_serializer=PentestUserSerializer, *args, **kwargs): + super().__init__(*args, **kwargs) + self.user_serializer = user_serializer + + def get_related_user_serializer(self): + s = RelatedUserSerializer(user_serializer=self.user_serializer) + s.bind('user', self) + return s + + def to_representation(self, instance): + return self.get_related_user_serializer().to_representation(instance.user) | \ + super().to_representation(instance) + + def to_internal_value(self, data): + return super().to_internal_value(data) | { + 'user': self.get_related_user_serializer().to_internal_value(data) + } + + +class ImportedProjectMemberInfoSerializer(serializers.ModelSerializer): + roles = serializers.ListField(child=serializers.CharField(), allow_empty=True) + + class Meta(PentestUserSerializer.Meta): + fields = omit_items(PentestUserSerializer.Meta.fields, ['username']) + ['roles'] + extra_kwargs = { + 'id': {'read_only': False}, + } + + +class ImportedProjectMemberInfoListSerializer(serializers.ListSerializer): + child = ImportedProjectMemberInfoSerializer() + + def update(self, instance, validated_data): + updated = [] + for d in validated_data: + i = next(filter(lambda e: str(e.get('id')) == str(d.get('id')), instance), {}) + updated.append(i | d) + return updated + + +class PentestProjectSerializer(serializers.ModelSerializer): + project_type = ProjectTypeRelatedField() + force_change_project_type = serializers.BooleanField(required=False, default=False, write_only=True) + + members = ProjectMemberInfoSerializer(many=True, required=False) + imported_members = ImportedProjectMemberInfoListSerializer(required=False) + + details = serializers.HyperlinkedIdentityField(view_name='pentestproject-detail', read_only=True) + findings = serializers.HyperlinkedIdentityField(view_name='finding-list', lookup_url_kwarg='project_pk', read_only=True) + sections = serializers.HyperlinkedIdentityField(view_name='section-list', lookup_url_kwarg='project_pk', read_only=True) + notes = serializers.HyperlinkedIdentityField(view_name='projectnotebookpage-list', 
lookup_url_kwarg='project_pk', read_only=True) + images = serializers.HyperlinkedIdentityField(view_name='uploadedimage-list', lookup_url_kwarg='project_pk', read_only=True) + + class Meta: + model = PentestProject + fields = [ + 'id', 'created', 'updated', + 'name', 'project_type', 'force_change_project_type', 'language', 'readonly', 'source', + 'members', 'imported_members', + 'details', 'findings', 'sections', 'notes', 'images', + ] + read_only_fields = ['readonly'] + + def validate_project_type(self, value): + if self.instance and self.instance.project_type != value and not self.initial_data.get('force_change_project_type'): + res_finding = check_definitions_compatible(self.instance.project_type.finding_fields_obj, value.finding_fields_obj, path=('finding_fields',)) + res_report = check_definitions_compatible(self.instance.project_type.report_fields_obj, value.report_fields_obj, path=('report_fields',)) + if not res_finding[0] or not res_report[0]: + raise serializers.ValidationError(['Designs have incompatible field definitions. Converting might result in data loss.'] + res_report[1] + res_finding[1]) + + return value + + @transaction.atomic + def create(self, validated_data): + project_type = validated_data.pop('project_type').copy(linked_user=None, source=SourceEnum.SNAPSHOT, created=timezone.now()) + validated_data.pop('force_change_project_type') + + members = validated_data.pop('members', []) + + project = super().create(validated_data | { + 'project_type': project_type, + 'language': project_type.language, + 'custom_fields': ensure_defined_structure( + value={ + 'title': validated_data.get('name', 'Report Title'), + }, + definition=project_type.report_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT + ), + }) + + # add current user as member + if not any(map(lambda m: m.get('user') == self.context['request'].user, members)): + members.append({'user': self.context['request'].user, 'roles': ProjectMemberRole.default_roles}) + ProjectMemberInfo.objects.bulk_create([ProjectMemberInfo(**m, project=project) for m in members]) + + project_type.linked_project = project + project_type.save(update_fields=['linked_project']) + + return project + + def update(self, instance, validated_data): + members = validated_data.pop('members', None) + if (imported_members := validated_data.get('imported_members')) is not None: + validated_data['imported_members'] = self.fields['imported_members'].update(instance.imported_members, imported_members) + if (project_type := validated_data.get('project_type')) and instance.project_type != project_type and project_type.linked_project != instance: + validated_data['project_type'] = project_type.copy( + linked_project=instance, + linked_user=None, + source=SourceEnum.SNAPSHOT, + created=timezone.now()) + + instance = super().update(instance, validated_data) + if members is not None: + PentestProject.objects.set_members(instance=instance, members=[ProjectMemberInfo(**m) for m in members]) + return instance + + +class ReportSectionSerializer(serializers.ModelSerializer): + id = serializers.CharField(source='section_id', read_only=True) + project = serializers.PrimaryKeyRelatedField(read_only=True) + project_type = ProjectTypeRelatedField(source='project.project_type_id', read_only=True) + label = serializers.CharField(source='section_label', read_only=True) + fields = serializers.ListField(source='section_fields', child=serializers.CharField(), read_only=True) + lock_info = LockInfoSerializer() + assignee = 
RelatedUserSerializer(required=False, allow_null=True) + + class Meta: + model = ReportSection + fields = [ + 'id', 'label', 'fields', 'project', 'project_type', + 'language', 'lock_info', 'assignee', 'status', + ] + + def get_fields(self): + fields = super().get_fields() + data_field = serializers.DictField() + if self.instance and isinstance(self.instance, ReportSection): + data_field = serializer_from_definition(definition=self.instance.field_definition, **self.get_extra_kwargs().get('data', {})) + return fields | { + 'data': data_field + } + + def update(self, instance, validated_data): + instance.update_data(validated_data.pop('data', {})) + instance.project.save() + return super().update(instance, validated_data) + + +class FindingTemplateSerializer(serializers.ModelSerializer): + details = serializers.HyperlinkedIdentityField(view_name='findingtemplate-detail', read_only=True) + lock_info = LockInfoSerializer() + + class Meta: + model = FindingTemplate + fields = [ + 'id', 'created', 'updated', 'details', + 'lock_info', 'usage_count', 'source', + 'tags', 'language', 'status', + ] + read_only_fields = ['usage_count'] + extra_kwargs = { + 'tags': {'required': False, 'allow_empty': True} + } + + def get_fields(self): + return super().get_fields() | { + 'data': serializer_from_definition(definition=FindingTemplate.field_definition), + } + + def create(self, validated_data): + data = validated_data.pop('data', {}) + instance = FindingTemplate(**validated_data) + instance.update_data(data) + instance.save() + return instance + + def update(self, instance, validated_data): + instance.update_data(validated_data.pop('data', {})) + return super().update(instance, validated_data) + + +class NotebookPageSerializer(serializers.ModelSerializer): + id = serializers.UUIDField(source='note_id', read_only=True) + lock_info = LockInfoSerializer() + title = serializers.CharField(required=False, allow_blank=True) + text = serializers.CharField(required=False, allow_blank=True) + parent = serializers.UUIDField(source='parent.note_id', allow_null=True, read_only=True) + + class Meta: + model = NotebookPage + fields = [ + 'id', 'created', 'updated', 'lock_info', + 'title', 'text', 'checked', 'icon_emoji', 'status_emoji', + 'order', 'parent', + ] + extra_kwargs = { + 'order': {'read_only': True}, + } + + +class NotebookPageCreateSerializer(NotebookPageSerializer): + parent = serializers.UUIDField(source='parent.note_id', allow_null=True, required=False) + + class Meta(NotebookPageSerializer.Meta): + extra_kwargs = { + 'order': {'read_only': False, 'required': False, 'allow_null': True}, + } + + def get_notebook_object(self): + return None + + def validate_parent(self, value): + if value: + parent = NotebookPage.objects \ + .filter(**self.get_notebook_object()) \ + .filter(note_id=value) \ + .first() + if not parent: + raise serializers.ValidationError('Invalid note id') + return parent + return value + + @transaction.atomic() + def create(self, validated_data): + validated_data['parent'] = validated_data.get('parent', {}).get('note_id') + + if validated_data.get('order'): + NotebookPage.objects \ + .filter(**self.get_notebook_object()) \ + .filter(parent=validated_data.get('parent')) \ + .filter(order__gte=validated_data.get('order')) \ + .update(order=F('order') + 1) + else: + validated_data.pop('order', None) + + return super().create(validated_data | self.get_notebook_object()) + + +class ProjectNotebookPageCreateSerializer(NotebookPageCreateSerializer): + def get_notebook_object(self): + return 
{'project': self.context['project']} + + +class UserNotebookPageCreateSerializer(NotebookPageCreateSerializer): + def get_notebook_object(self): + return {'user': self.context['user']} + + +class NotebookPageSortSerializer(serializers.ModelSerializer): + id = serializers.UUIDField(source='note_id') + parent = serializers.UUIDField(source='parent.note_id', allow_null=True) + + class Meta: + model = NotebookPage + fields = ['id', 'parent', 'order'] + + def validate_id(self, value): + if not next(filter(lambda n: n.note_id == value, self.parent.instance), None): + raise serializers.ValidationError('Invalid note id') + return value + + def validate_parent(self, value): + parent = next(filter(lambda n: n.note_id == value, self.parent.instance), None) + if value is not None and not parent: + raise serializers.ValidationError('Invalid note id') + return parent + + +class NotebookPageSortListSerializer(serializers.ListSerializer): + def __init__(self, *args, **kwargs): + super().__init__(child=NotebookPageSortSerializer(), *args, **kwargs) + + def update(self, instance, validated_data): + # Update values + missing_notes = [] + for note in instance: + if data := next(filter(lambda d: note.note_id == d.get('note_id'), validated_data), None): + note.parent = data.get('parent', {}).get('note_id') + note.order = data.get('order') + else: + missing_notes.append(note) + + NotebookPage.objects.check_parent_and_order(instance, missing_notes) + NotebookPage.objects.bulk_update(instance, ['parent_id', 'order']) + return instance + + +class UserPublicKeySerializer(serializers.ModelSerializer): + class Meta: + model = UserPublicKey + fields = ['id', 'created', 'updated', 'name', 'enabled', 'public_key', 'public_key_info'] + read_only_fields = ['public_key', 'public_key_info'] + + +class UserPublicKeyRegisterBeginSerializer(UserPublicKeySerializer): + class Meta(UserPublicKeySerializer.Meta): + read_only_fields = ['public_key_info'] + + def create(self, validated_data): + try: + public_key_info = pgp.public_key_info(validated_data['public_key']) + except CryptoError as ex: + raise serializers.ValidationError(detail=ex.args[0]) from ex + + return UserPublicKey(**validated_data | { + 'public_key_info': public_key_info + }) + + +class ArchivedProjectKeyPartSerializer(serializers.ModelSerializer): + user = PentestUserSerializer(read_only=True) + + class Meta: + model = ArchivedProjectKeyPart + fields = ['id', 'created', 'updated', 'user', 'is_decrypted', 'decrypted_at'] + + +class ArchivedProjectSerializer(serializers.ModelSerializer): + key_parts = ArchivedProjectKeyPartSerializer(many=True, read_only=True) + + class Meta: + model = ArchivedProject + fields = ['id', 'created', 'updated', 'name', 'threshold', 'key_parts'] + + +class ArchivedProjectPublicKeyEncryptedKeyPartSerializer(serializers.ModelSerializer): + public_key = UserPublicKeySerializer(read_only=True) + + class Meta: + model = ArchivedProjectPublicKeyEncryptedKeyPart + fields = ['id', 'created', 'updated', 'public_key', 'encrypted_data'] + + +class ArchivedProjectKeyPartDecryptSerializer(serializers.Serializer): + data = serializers.CharField() + + def validate_data(self, value): + try: + return b64decode(value) + except Exception: + raise serializers.ValidationError('Invalid format. 
Expected base64 encoded data') + + def validate(self, attrs): + if self.instance.is_decrypted: + raise serializers.ValidationError('Already decrypted') + return super().validate(attrs) + + def update(self, instance, validated_data): + try: + with crypto.open(io.BytesIO(instance.encrypted_key_part), mode='rb', key=crypto.EncryptionKey(id=None, key=validated_data['data'])) as c: + instance.key_part = json.loads(c.read()) + instance.decrypted_at = timezone.now() + except Exception as ex: + raise serializers.ValidationError('Decryption of key part failed') from ex + instance.save() + output = { + 'status': 'key-part-decrypted' + } + + # Restore whole project when enough key parts are decrypted + archive = self.context['archived_project'] + available_key_parts = list(archive.key_parts.exclude(decrypted_at=None)) + if len(available_key_parts) >= archive.threshold: + project = ArchivedProject.objects.restore_project(archive) + output |= { + 'status': 'project-restored', + 'project_id': project.id, + } + + return output + + +class UploadedFileSerilaizerBase(serializers.ModelSerializer): + compress_images = True + + resource_type = serializers.SerializerMethodField() + + class Meta: + fields = ['id', 'created', 'updated', 'resource_type', 'name', 'file'] + extra_kwargs = { + 'file': {'write_only': True}, + 'name': {'required': False}, + } + + def get_resource_type(self, obj): + return None + + def get_linked_object(self): + return None + + def create(self, validated_data): + if self.compress_images: + validated_data['file'], validated_data['name'] = compress_image(validated_data['file'], validated_data.get('name')) + validated_data['linked_object'] = self.get_linked_object() + validated_data['uploaded_by'] = self.context['request'].user + return super().create(validated_data) + + +class UploadedImageSerializer(UploadedFileSerilaizerBase): + class Meta(UploadedFileSerilaizerBase.Meta): + model = UploadedImage + + def get_linked_object(self): + return self.context['project'] + + def get_resource_type(self, obj): + return 'image' + + +class UploadedUserNotebookImageSerializer(UploadedFileSerilaizerBase): + class Meta(UploadedFileSerilaizerBase.Meta): + model = UploadedUserNotebookImage + + def get_linked_object(self): + return self.context['user'] + + def get_resource_type(self, obj): + return 'user-notebook-image' + + +class UploadedAssetSerializer(UploadedFileSerilaizerBase): + class Meta(UploadedFileSerilaizerBase.Meta): + model = UploadedAsset + + def get_linked_object(self): + return self.context['project_type'] + + def get_resource_type(self, obj): + return 'asset' + + +class UploadedProjectFileSerilaizer(UploadedFileSerilaizerBase): + compress_images = False + + class Meta(UploadedFileSerilaizerBase.Meta): + model = UploadedProjectFile + + def get_linked_object(self): + return self.context['project'] + + def get_resource_type(self, obj): + return 'file' + + +class PreviewPdfOptionsSerializer(serializers.Serializer): + project_type = ProjectTypeRelatedField(required=False, allow_null=True) + report_template = serializers.CharField(required=False, allow_null=True, allow_blank=True) + report_styles = serializers.CharField(required=False, allow_null=True, allow_blank=True) + + +class PublishPdfOptionsSerializer(serializers.Serializer): + password = serializers.CharField(required=False, allow_null=True, allow_blank=True) + + +class ImportSerializer(serializers.Serializer): + file = serializers.FileField() + + +class PentestProjectReadonlySerializer(serializers.ModelSerializer): + class Meta: + model 
= PentestProject + fields = ['readonly'] + + +class CopySerializer(serializers.Serializer): + def update(self, instance, validated_data): + return instance.copy() + + +class ProjectTypeCopySerializer(serializers.ModelSerializer): + scope = serializers.ChoiceField(choices=[c for c in ProjectTypeScope.choices if c[0] in [ProjectTypeScope.GLOBAL.value, ProjectTypeScope.PRIVATE.value]]) + + class Meta: + model = ProjectType + fields = ['name', 'scope'] + extra_kwargs = { + 'name': {'required': False}, + } + + def validate_scope(self, value): + if value == ProjectTypeScope.PRIVATE and not settings.ENABLE_PRIVATE_DESIGNS: + raise serializers.ValidationError(f'Scope "{value}" not supported') + elif value == ProjectTypeScope.GLOBAL and not (self.context['request'].user.is_admin or self.context['request'].user.is_designer): + raise exceptions.PermissionDenied() + return value + + def update(self, instance, validated_data): + return instance.copy( + name='Copy of ' + instance.name, + source=SourceEnum.CREATED, + created=timezone.now(), + **({ + ProjectTypeScope.GLOBAL: {'linked_user': None, 'linked_project': None}, + ProjectTypeScope.PRIVATE: {'linked_user': self.context['request'].user, 'linked_project': None}, + }[validated_data.pop('scope')]) + ) + + +class PentestProjectCopySerializer(serializers.ModelSerializer): + project_type = ProjectTypeRelatedField(required=False) + + class Meta: + model = PentestProject + fields = ['name', 'project_type'] + extra_kwargs = {'name': {'required': False}} + + def update(self, instance, validated_data): + return instance.copy( + name='Copy of ' + instance.name, + source=SourceEnum.CREATED, + created=timezone.now(), + **validated_data, + ) + + +class PentestProjectCreateArchiveSerializer(serializers.Serializer): + @functools.cache + def get_archive_users(self): + return ArchivedProject.objects.get_archive_users_for_project(self.instance) + + def validate(self, attrs): + if not self.instance.readonly: + raise serializers.ValidationError('Cannot archive non-finished project') + if len(self.get_archive_users()) < settings.ARCHIVING_THRESHOLD: + raise serializers.ValidationError('Too few users') + return super().validate(attrs) + + @transaction.atomic() + def update(self, instance, validated_data): + return ArchivedProject.objects.create_from_project( + project=instance, + users=self.get_archive_users(), + delete_project=True, + ) + + +class PentestUserCheckArchiveSerializer(PentestUserSerializer): + is_project_member = serializers.BooleanField() + has_public_keys = serializers.BooleanField() + can_restore = serializers.SerializerMethodField() + warnings = serializers.SerializerMethodField() + + class Meta(PentestUserSerializer.Meta): + fields = PentestUserSerializer.Meta.fields + ['is_active', 'is_global_archiver', 'is_project_member', 'has_public_keys', 'can_restore', 'warnings'] + + def get_can_restore(self, obj): + return obj.is_active and obj.has_public_keys and (obj.is_global_archiver or obj.is_project_member) + + def get_warnings(self, obj): + warnings = [] + if not obj.is_active: + warnings.append('User is not active') + if not obj.has_public_keys: + warnings.append('User has no public keys enabled') + return warnings + diff --git a/api/src/reportcreator_api/pentests/signals.py b/api/src/reportcreator_api/pentests/signals.py new file mode 100644 index 0000000..cc5607d --- /dev/null +++ b/api/src/reportcreator_api/pentests/signals.py @@ -0,0 +1,118 @@ +from django.db.models import signals +from django.dispatch import receiver +from 
reportcreator_api.pentests.customfields.types import parse_field_definition +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure, has_field_structure_changed + +from reportcreator_api.pentests.models import PentestFinding, PentestProject, ProjectType, ReportSection, UploadedAsset, UploadedImage, UploadedProjectFile, UploadedUserNotebookImage +from reportcreator_api.pentests.models.archive import ArchivedProject +from reportcreator_api.utils.models import disable_for_loaddata + + +@receiver(signals.pre_save, sender=PentestProject) +@disable_for_loaddata +def project_project_type_changed_presave(sender, instance, *args, **kwargs): + if instance.id is None or instance._state.adding or 'project_type_id' in instance.changed_fields: + # Convert report data + instance.update_data(ensure_defined_structure( + value=instance.data_all, + definition=instance.project_type.report_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT, + include_undefined=True)) + + +@receiver(signals.post_save, sender=PentestProject) +@disable_for_loaddata +def project_project_type_changed_postsave(sender, instance, created, *args, **kwargs): + """ + When the project_type of a project changed, update the structure of all fields + """ + if created or 'project_type_id' in instance.changed_fields: + # Create/delete report sections + existing_sections = {s.section_id for s in instance.sections.all()} + all_sections = {s.get('id') for s in instance.project_type.report_sections} + if new_sections := all_sections - existing_sections: + ReportSection.objects.bulk_create([ReportSection(project=instance, section_id=s) for s in new_sections]) + if removed_sections := existing_sections - all_sections: + ReportSection.objects \ + .filter(project=instance) \ + .filter(section_id__in=removed_sections) \ + .delete() + + # Update finding fields + updated_findings = list(instance.findings.all().select_related('project__project_type')) + for finding in updated_findings: + finding.update_data(ensure_defined_structure( + value=finding.data_all, + definition=instance.project_type.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT, + include_undefined=True)) + PentestFinding.objects.bulk_update( + filter(lambda f: f.has_changed, updated_findings), + fields=['custom_fields']) + + +@receiver(signals.post_save, sender=ProjectType) +@disable_for_loaddata +def project_type_field_definition_changed(sender, instance, *args, **kwargs): + """ + When report_fields or finding_fields structure changed, update the field structure of all projects that are based on this project_type + """ + + if (diff := instance.get_field_diff('report_fields')) and has_field_structure_changed(parse_field_definition(diff[0]), instance.report_fields_obj): + # Update structure of all reports using that project_type + updated_projects = list(PentestProject.objects.filter(project_type=instance).select_related('project_type')) + for project in updated_projects: + project.update_data(ensure_defined_structure( + value=project.data_all, + definition=instance.report_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT, + include_undefined=True)) + PentestProject.objects.bulk_update(updated_projects, fields=['custom_fields']) + + if (diff := instance.get_field_diff('finding_fields')) and has_field_structure_changed(parse_field_definition(diff[0]), instance.finding_fields_obj): + # Update structure of all findings of this project_type + updated_findings = 
list(PentestFinding.objects.filter(project__project_type=instance).select_related('project__project_type')) + for finding in updated_findings: + finding.update_data(ensure_defined_structure( + value=finding.data_all, + definition=instance.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT, + include_undefined=True)) + PentestFinding.objects.bulk_update(updated_findings, fields=['custom_fields']) + + if (diff := instance.get_field_diff('report_sections')): + sections_prev = set(map(lambda s: s.get('id'), diff[0])) + sections_curr = set(map(lambda s: s.get('id'), diff[1])) + if sections_prev != sections_curr: + # Create new sections + if new_sections := sections_curr - sections_prev: + sections_to_create = [] + for project in PentestProject.objects.filter(project_type=instance).iterator(): + sections_to_create.extend([ReportSection(project=project, section_id=s) for s in new_sections]) + ReportSection.objects.bulk_create(sections_to_create) + + # Delete removed sections + if removed_sections := sections_prev - sections_curr: + ReportSection.objects \ + .filter(project__project_type=instance) \ + .filter(section_id__in=removed_sections) \ + .delete() + + +@receiver(signals.post_delete, sender=UploadedAsset) +@receiver(signals.post_delete, sender=UploadedImage) +@receiver(signals.post_delete, sender=UploadedUserNotebookImage) +@receiver(signals.post_delete, sender=UploadedProjectFile) +@receiver(signals.post_delete, sender=ArchivedProject) +def uploaded_file_deleted(sender, instance, *args, **kwargs): + # Delete file when instance is deleted from DB and file on filesystem is no longer referenced + if instance.file: + file_referenced = \ + UploadedAsset.objects.filter(file=instance.file) \ + .union(UploadedImage.objects.filter(file=instance.file)) \ + .union(UploadedUserNotebookImage.objects.filter(file=instance.file)) \ + .union(UploadedProjectFile.objects.filter(file=instance.file)) \ + .union(ArchivedProject.objects.filter(file=instance.file)) \ + .exists() + if not file_referenced: + instance.file.delete(save=False) diff --git a/api/src/reportcreator_api/pentests/storages.py b/api/src/reportcreator_api/pentests/storages.py new file mode 100644 index 0000000..894187f --- /dev/null +++ b/api/src/reportcreator_api/pentests/storages.py @@ -0,0 +1,17 @@ +from django.core.files.storage import storages + + +def get_uploaded_image_storage(): + return storages['uploaded_images'] + + +def get_uploaded_asset_storage(): + return storages['uploaded_assets'] + + +def get_uploaded_file_storage(): + return storages['uploaded_files'] + + +def get_archive_file_storage(): + return storages['archived_files'] diff --git a/api/src/reportcreator_api/pentests/tasks.py b/api/src/reportcreator_api/pentests/tasks.py new file mode 100644 index 0000000..c32e127 --- /dev/null +++ b/api/src/reportcreator_api/pentests/tasks.py @@ -0,0 +1,132 @@ +import elasticapm +from datetime import timedelta +from django.utils import timezone +from django.db.models import Q, F, Prefetch, Exists, OuterRef, Subquery, Max + +from reportcreator_api.pentests.models import NotebookPage, UploadedImage, UploadedProjectFile, UploadedUserNotebookImage, PentestProject +from reportcreator_api.users.models import PentestUser + + +def is_referenced_in_project(project, f): + # Project data (sections) + if f.name in str(project.data_all): + return True + + # Findings + for finding in project.findings.all(): + if f.name in str(finding.data_all): + return True + + # Notes + for note in project.notes.all(): + if f.name in 
note.text or f.name in note.title: + return True + return False + + +@elasticapm.async_capture_span() +async def cleanup_project_files(task_info): + # Only cleanup older files, to prevent race conditions: upload -> cleanup -> save text with reference -> referenced file already deleted + older_than = timezone.now() - timedelta(days=2) + projects = PentestProject.objects \ + .filter(created__lt=older_than) \ + .select_related('project_type') \ + .prefetch_related( + 'findings', + 'notes', + Prefetch('images', UploadedImage.objects.filter(updated__lt=older_than), to_attr='images_cleanup'), + Prefetch('files', UploadedProjectFile.objects.filter(updated__lt=older_than), to_attr='files_cleanup'), + ) + # Only check projects that changed since the last cleanup + if last_run := task_info['model'].last_success: + projects = projects.filter( + Q(updated__gt=last_run) | + Q(findings__updated__gt=last_run) | + Q(sections__updated__gt=last_run) | + Q(notes__updated__gt=last_run) + ) + projects = projects.distinct() + + # Check if files are referenced + # Requires checking in python because of DB encryption + cleanup_images = [] + cleanup_files = [] + async for p in projects: + for f in p.images_cleanup: + if not is_referenced_in_project(p, f): + cleanup_images.append(f) + for f in p.files_cleanup: + if not is_referenced_in_project(p, f): + cleanup_files.append(f) + + if cleanup_images: + await UploadedImage.objects \ + .filter(pk__in=map(lambda f: f.pk, cleanup_images)) \ + .adelete() + if cleanup_files: + await UploadedProjectFile.objects \ + .filter(pk__in=map(lambda f: f.pk, cleanup_files)) \ + .adelete() + + +@elasticapm.async_capture_span() +async def cleanup_usernotebook_files(task_info): + older_than = timezone.now() - timedelta(days=2) + + user_notes = NotebookPage.objects \ + .filter(user=OuterRef('pk')) + if last_run := task_info['model'].last_success: + user_notes = user_notes.filter(updated__gt=last_run) + + images_cleanup = UploadedUserNotebookImage.objects.filter(updated__lt=older_than) + + users = PentestUser.objects \ + .filter(created__lt=older_than) \ + .annotate(has_notes=Exists(user_notes)) \ + .annotate(has_images=Exists(images_cleanup.filter(linked_object=OuterRef('pk')))) \ + .filter(has_notes=True) \ + .filter(has_images=True) \ + .prefetch_related( + 'notes', + Prefetch('images', images_cleanup, to_attr='images_cleanup'), + ) + + cleanup_images = [] + async for u in users: + for f in u.images_cleanup: + for n in u.notes.all(): + if f.name in n.text or f.name in n.title: + break + else: + cleanup_images.append(f) + + if cleanup_images: + await UploadedUserNotebookImage.objects \ + .filter(pk__in=map(lambda f: f.pk, cleanup_images)) \ + .adelete() + + +async def cleanup_unreferenced_images_and_files(task_info): + await cleanup_project_files(task_info) + await cleanup_usernotebook_files(task_info) + + +def reset_stale_archive_restores(task_info): + """ + Deletes decrypted shamir keys from the database, when archive restore is stale (last decryption more than 3 days ago), + i.e. some users decrypted their key parts, but some are still missing. + Prevent decrypted shamir keys being stored in the DB forever. 
+ """ + from reportcreator_api.pentests.models import ArchivedProject, ArchivedProjectKeyPart + + ArchivedProjectKeyPart.objects \ + .filter(decrypted_at__isnull=False) \ + .annotate(last_decrypted=Subquery( + ArchivedProjectKeyPart.objects + .filter(archived_project=OuterRef('archived_project')) + .values('archived_project') + .annotate(last_decrypted=Max('decrypted_at')) + .values_list('last_decrypted') + )) \ + .filter(last_decrypted__lt=timezone.now() - timedelta(days=3)) \ + .update(decrypted_at=None, key_part=None) diff --git a/api/src/reportcreator_api/pentests/views.py b/api/src/reportcreator_api/pentests/views.py new file mode 100644 index 0000000..f5fbc82 --- /dev/null +++ b/api/src/reportcreator_api/pentests/views.py @@ -0,0 +1,782 @@ +import contextlib +import functools +from io import BytesIO +import operator +from uuid import uuid4 +from django.forms import model_to_dict +from django.http import FileResponse, StreamingHttpResponse +from django.shortcuts import get_object_or_404 +from django.db import transaction +from django.db.models import Prefetch, Q, Exists, OuterRef, ProtectedError +from rest_framework import viewsets, mixins, status, exceptions +from rest_framework.response import Response +from rest_framework.decorators import action +from rest_framework.pagination import CursorPagination +from rest_framework.serializers import Serializer, ValidationError +from rest_framework.filters import OrderingFilter, SearchFilter +from rest_framework.settings import api_settings +from django_filters.rest_framework import DjangoFilterBackend, FilterSet, MultipleChoiceFilter, UUIDFilter + +from reportcreator_api.users.models import PentestUser +from reportcreator_api.users.views import APIBadRequestError +from reportcreator_api.utils import license +from reportcreator_api.utils.api import FileResponseAsync, GenericAPIViewAsync +from reportcreator_api.archive.import_export import export_project_types, export_projects, export_templates, import_project_types, import_projects, import_templates +from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_PREDEFINED +from reportcreator_api.pentests.customfields.types import field_definition_to_dict +from reportcreator_api.pentests.models import FindingTemplate, LockStatus, NotebookPage, PentestFinding, PentestProject, ProjectType, ProjectTypeScope, \ + ReportSection, SourceEnum, UploadedAsset, UploadedImage, ProjectMemberInfo, UploadedProjectFile, UploadedUserNotebookImage, \ + UserPublicKey, ArchivedProject, ArchivedProjectKeyPart +from reportcreator_api.pentests.permissions import ArchivedProjectKeyPartPermissions, IsTemplateEditorOrReadOnly, ProjectPermissions, \ + ProjectSubresourcePermissions, ProjectTypePermissions, ProjectTypeSubresourcePermissions, UserPublicKeyPermissions +from reportcreator_api.tasks.rendering.entry import PdfRenderingError, render_pdf, render_pdf_preview +from reportcreator_api.pentests.serializers import ArchivedProjectKeyPartDecryptSerializer, ArchivedProjectKeyPartSerializer, \ + ArchivedProjectPublicKeyEncryptedKeyPartSerializer, ArchivedProjectSerializer, CopySerializer, FindingTemplateSerializer, ImportSerializer, \ + NotebookPageSerializer, PentestFindingFromTemplateSerializer, PentestFindingSerializer, PentestProjectCreateArchiveSerializer, \ + PentestProjectReadonlySerializer, PentestProjectSerializer, PentestUserCheckArchiveSerializer, PreviewPdfOptionsSerializer, \ + ProjectNotebookPageCreateSerializer, NotebookPageSortListSerializer, ProjectTypeCreateSerializer, 
ProjectTypeDetailSerializer, \ + ProjectTypePreviewSerializer, ProjectTypeShortSerializer, ProjectTypeCopySerializer, PublishPdfOptionsSerializer, ReportSectionSerializer, \ + UploadedAssetSerializer, UploadedImageSerializer, PentestProjectCopySerializer, UploadedProjectFileSerilaizer, \ + UploadedUserNotebookImageSerializer, UserNotebookPageCreateSerializer, UserPublicKeyRegisterBeginSerializer, UserPublicKeySerializer +from reportcreator_api.utils.error_messages import MessageLevel, format_messages + + +class ViewSetMixinHelper: + def get_serializer_for_action(self, action, **kwargs): + action_bak = self.action + try: + self.action = action + return self.get_serializer(**kwargs) + finally: + self.action = action_bak + + +class LockableViewSetMixin(ViewSetMixinHelper): + def get_serializer_class(self): + if self.action in ['lock', 'unlock']: + return Serializer + return super().get_serializer_class() + + @action(detail=True, methods=['post']) + def lock(self, request, *args, **kwargs): + instance = self.get_object() + + lock_status = instance.lock(request.user, refresh_lock=request.data.get('refresh_lock', True)) + instance.refresh_from_db() + serializer = self.get_serializer_for_action('get', instance=instance) + return Response(serializer.data, status={ + LockStatus.CREATED: status.HTTP_201_CREATED, + LockStatus.REFRESHED: status.HTTP_200_OK, + LockStatus.FAILED: status.HTTP_403_FORBIDDEN + }[lock_status]) + + @action(detail=True, methods=['post']) + def unlock(self, request, *args, **kwargs): + instance = self.get_object() + if not instance.unlock(request.user): + raise exceptions.PermissionDenied('Could not lock object') + + serializer = self.get_serializer_for_action('get', instance=instance) + return Response(serializer.data) + + @contextlib.contextmanager + def _ensure_locked(self, instance): + was_locked = instance.is_locked + if instance.lock(self.request.user, refresh_lock=False) == LockStatus.FAILED: + raise exceptions.PermissionDenied('Could not lock object') + yield instance + if not was_locked and instance.pk is not None: + instance.unlock(self.request.user) + + def perform_update(self, serializer): + with self._ensure_locked(serializer.instance): + return super().perform_update(serializer) + + def perform_destroy(self, instance): + with self._ensure_locked(instance): + return super().perform_destroy(instance) + + +class ExportImportViewSetMixin(ViewSetMixinHelper): + def get_serializer_class(self): + if self.action == 'export': + return Serializer + elif self.action == 'import_': + return ImportSerializer + else: + return super().get_serializer_class() + + @action(detail=True, methods=['post']) + def export(self, request, **kwargs): + instance = self.get_object() + archive = self.perform_export([instance]) + return StreamingHttpResponse(streaming_content=archive, headers={ + 'Content-Type': 'application/octet-stream', + 'Content-Disposition': f'inline', + }) + + def perform_export(self, instances): + pass + + @action(detail=False, url_path='import', url_name='import', methods=['post']) + def import_(self, request, **kwargs): + import_serializer = self.get_serializer(data=request.data) + import_serializer.is_valid(raise_exception=True) + + with import_serializer.validated_data['file'].open('rb') as f: + imported_instances = self.perform_import(f, data=import_serializer.validated_data) + result_serializer = self.get_serializer_for_action('get', instance=imported_instances, many=True) + return Response(result_serializer.data, status=status.HTTP_201_CREATED) + + def 
perform_import(self, archive, data): + pass + + +class CopyViewSetMixin: + def get_serializer_class(self): + if self.action == 'copy': + return CopySerializer + return super().get_serializer_class() + + @action(detail=True, methods=['post']) + def copy(self, request, *args, **kwargs): + instance = self.get_object() + request_serializer = self.get_serializer(instance=instance, data=request.data) + request_serializer.is_valid(raise_exception=True) + instance_cp = request_serializer.save() + + response_serializer = self.get_serializer_for_action('get', instance=instance_cp) + return Response(response_serializer.data, status=status.HTTP_201_CREATED) + + +class ProjectTypeFilter(FilterSet): + scope = MultipleChoiceFilter(label='Scopes', choices=ProjectTypeScope.choices, method='filter_scopes') + linked_project = UUIDFilter(label='Linked project', method='filter_linked_project') + + class Meta: + model = ProjectType + fields = ['language'] + + def filter_scopes(self, queryset, name, value): + scope_filters = [] + for v in set(value): + if v == 'global': + scope_filters.append(Q(linked_project=None) & Q(linked_user=None)) + elif v == 'private': + scope_filters.append(Q(linked_user=self.request.user)) + elif v == 'project': + scope_filters.append(Q(linked_project__isnull=False)) + + return queryset.filter(functools.reduce(operator.or_, scope_filters)) + + def filter_linked_project(self, queryset, name, value): + return queryset.filter(Q(linked_project=None) | Q(linked_project_id=value)) + + +class ProjectTypeViewSetBase: + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectTypePermissions] + queryset = ProjectType.objects.all() + serializer_class = ProjectTypeDetailSerializer + filter_backends = [SearchFilter, DjangoFilterBackend, OrderingFilter] + search_fields = ['name'] + filterset_class = ProjectTypeFilter + ordering_fields = ['created', 'name'] + ordering = ['-created'] + + def get_serializer_class(self): + if self.action == 'list': + return ProjectTypeShortSerializer + elif self.action == 'create': + return ProjectTypeCreateSerializer + elif self.action == 'preview': + return ProjectTypePreviewSerializer + elif self.action == 'copy': + return ProjectTypeCopySerializer + return super().get_serializer_class() + + def get_queryset(self): + return super().get_queryset() \ + .select_related('lock_info_data', 'lock_info_data__user', 'linked_project', 'linked_user') \ + .prefetch_related(Prefetch('linked_project__members', queryset=ProjectMemberInfo.objects.select_related('user'))) \ + .only_permitted(self.request.user) + + +class ProjectTypeViewSet(ProjectTypeViewSetBase, LockableViewSetMixin, CopyViewSetMixin, ExportImportViewSetMixin, viewsets.ModelViewSet): + @action(detail=False, url_path='predefinedfields/findings') + def get_predefined_finding_fields(self, request, *args, **kwargs): + return Response(data=field_definition_to_dict(FINDING_FIELDS_PREDEFINED)) + + def perform_export(self, instances): + return export_project_types(instances) + + def perform_import(self, archive, data): + return import_project_types(archive) + + +class ProjectTypePreviewView(ProjectTypeViewSetBase, GenericAPIViewAsync): + _action = 'preview' + throttle_scope = 'pdf' + + async def post(self, request, *args, **kwargs): + instance = await self.aget_object() + serializer = await self.aget_valid_serializer(data=request.data) + + try: + d = serializer.validated_data + pdf_preview = await render_pdf_preview( + report_template=d['report_template'], + report_styles=d['report_styles'], + 
report_preview_data=d['report_preview_data'] or {}, + project_type=instance + ) + return FileResponseAsync(BytesIO(pdf_preview), content_type='application/pdf') + except PdfRenderingError as ex: + return Response(data=format_messages(ex.messages), status=status.HTTP_400_BAD_REQUEST) + + +class PentestProjectViewSetBase: + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectPermissions] + serializer_class = PentestProjectSerializer + filter_backends = [SearchFilter, DjangoFilterBackend] + search_fields = ['name'] + filterset_fields = ['language', 'readonly'] + + def get_serializer_class(self): + if self.action == 'generate': + return PublishPdfOptionsSerializer + elif self.action == 'preview': + return PreviewPdfOptionsSerializer + elif self.action == 'readonly': + return PentestProjectReadonlySerializer + elif self.action == 'copy': + return PentestProjectCopySerializer + elif self.action == 'upload_image_or_file': + return UploadedProjectFileSerilaizer + elif self.action in ['customize_projecttype', 'export_all', 'archive_check']: + return Serializer + elif self.action == 'archive': + return PentestProjectCreateArchiveSerializer + return super().get_serializer_class() + + def get_queryset(self): + qs = PentestProject.objects \ + .only_permitted(self.request.user) \ + .select_related('project_type') \ + .prefetch_related(Prefetch('members', queryset=ProjectMemberInfo.objects.select_related('user'))) + if self.action in ['check', 'preview', 'generate']: + qs = qs.prefetch_related('sections', 'findings') + return qs + + +class PentestProjectViewSet(PentestProjectViewSetBase, CopyViewSetMixin, ExportImportViewSetMixin, viewsets.ModelViewSet): + @action(detail=True, methods=['get']) + def check(self, request, *args, **kwargs): + instance = self.get_object() + res = instance.perform_checks() + return Response(data=format_messages(res)) + + @action(detail=True, methods=['get', 'patch', 'put']) + def readonly(self, request, *args, **kwargs): + if request.method == 'get': + return self.retrieve(request, *args, **kwargs) + else: + return self.partial_update(request, *args, **kwargs) + + @action(detail=True, url_path='customize-projecttype', methods=['post']) + def customize_projecttype(self, request, *args, **kwargs): + instance = self.get_object() + instance.project_type = instance.project_type.copy( + name='Customization of ' + instance.project_type.name, + source=SourceEnum.CUSTOMIZED, + linked_project=instance, + linked_user=None) + instance.save() + return Response(data={'project_type': instance.project_type.id}) + + @action(detail=True, url_path='upload', methods=['post']) + def upload_image_or_file(self, request, *args, **kwargs): + # First try saving an image, then saving as a regular file + serializer_context = self.get_serializer_context() | {'project': self.get_object()} + serializer = UploadedImageSerializer(data=request.data, context=serializer_context) + if not serializer.is_valid(raise_exception=False): + serializer = UploadedProjectFileSerilaizer(data=request.data, context=serializer_context) + serializer.is_valid(raise_exception=True) + + serializer.save() + return Response(data=serializer.data, status=status.HTTP_201_CREATED) + + @action(detail=True, methods=['post'], url_path='export/all') + def export_all(self, *args, **kwargs): + return self.export(*args, **kwargs) + + def perform_export(self, instances): + return export_projects(instances, export_all=self.action == 'export_all') + + def perform_import(self, archive, data): + projects = 
import_projects(archive) + PentestProject.objects.add_member(user=self.request.user, projects=projects) + return projects + + @action(detail=True, methods=['post']) + def archive(self, request, *args, **kwargs): + project = self.get_object() + serializer = self.get_serializer(data=request.data, instance=project) + serializer.is_valid(raise_exception=True) + archive = serializer.save() + + archive_serializer = ArchivedProjectSerializer(instance=archive, context=self.get_serializer_context()) + return Response(data=archive_serializer.data, status=status.HTTP_201_CREATED) + + @action(detail=True, url_path='archive-check', methods=['get']) + def archive_check(self, request, *args, **kwargs): + project = self.get_object() + users = ArchivedProject.objects \ + .get_possible_archive_users_for_project(project) \ + .annotate_has_public_keys() \ + .annotate(is_project_member=Exists(PentestUser.objects.filter(projectmemberinfo__project=project).filter(pk=OuterRef('pk')))) + return Response(data={ + 'users': PentestUserCheckArchiveSerializer(instance=users, many=True).data + }) + + +class PentestProjectPreviewView(PentestProjectViewSetBase, GenericAPIViewAsync): + _action = 'preview' + throttle_scope = 'pdf' + + async def post(self, request, *args, **kwargs): + instance = await self.aget_object() + serializer = await self.aget_valid_serializer(instance, data=request.data) + options = serializer.validated_data + + try: + pdf_preview = await render_pdf(project=instance, **options) + return FileResponseAsync(BytesIO(pdf_preview), content_type='application/pdf') + except PdfRenderingError as ex: + return Response(data=format_messages(ex.messages), status=status.HTTP_400_BAD_REQUEST) + + +class PentestProjectGenerateView(PentestProjectViewSetBase, GenericAPIViewAsync): + _action = 'generate' + throttle_scope = 'pdf' + + async def post(self, request, *args, **kwargs): + instance = await self.aget_object() + serializer = await self.aget_valid_serializer(instance, data=request.data) + options = serializer.validated_data + + # Check for errors + if (messages := format_messages(instance.perform_checks())) and messages.get(MessageLevel.ERROR.value): + return Response(data=messages, status=status.HTTP_400_BAD_REQUEST) + + # Generate final report; optionally encrypt PDF if a password was supplied + try: + pdf = await render_pdf(project=instance, password=options.get('password')) + return FileResponseAsync(BytesIO(pdf), content_type='application/pdf') + except PdfRenderingError as ex: + return Response(data=format_messages(ex.messages), status=status.HTTP_400_BAD_REQUEST) + + +class PentestFindingViewSet(LockableViewSetMixin, viewsets.ModelViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectSubresourcePermissions] + serializer_class = PentestFindingSerializer + pagination_class = None + lookup_field = 'finding_id' + + def get_serializer_class(self): + if self.action == 'fromtemplate': + return PentestFindingFromTemplateSerializer + return super().get_serializer_class() + + @functools.cache + def get_project(self): + qs = PentestProject.objects \ + .only_permitted(self.request.user) \ + .select_related('project_type') + return get_object_or_404(qs, pk=self.kwargs['project_pk']) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'project': self.get_project(), + } + + def get_queryset(self): + return PentestFinding.objects \ + .only_permitted(self.request.user) \ + .filter(project=self.kwargs['project_pk']) \ + .select_related('project__project_type', 
'lock_info_data', 'lock_info_data__user', 'assignee') + + @action(detail=False, methods=['post']) + def fromtemplate(self, request, *args, **kwargs): + return super().create(request, *args, **kwargs) + + +class ReportSectionViewSet(LockableViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectSubresourcePermissions] + serializer_class = ReportSectionSerializer + pagination_class = None + lookup_field = 'section_id' + + def get_queryset(self): + qs = ReportSection.objects \ + .only_permitted(self.request.user) \ + .filter(project__id=self.kwargs['project_pk']) \ + .select_related('project__project_type', 'lock_info_data', 'lock_info_data__user', 'assignee') + if self.request.method in ['PUT', 'PATCH']: + # Lock project with select_for_update because all section fields of a report are stored in the same DB entity (project). + # Locking does not prevent race conditions on updating because multiple locks can exist for the same DB object. + # Therefore updates need to be serialized with select_for_update such that no race conditions occur. + qs = qs.select_for_update(of=['self', 'project']) + return qs + + @functools.cache + def get_project(self): + qs = PentestProject.objects \ + .only_permitted(self.request.user) \ + .select_related('project_type') + return get_object_or_404(qs, pk=self.kwargs['project_pk']) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'project': self.get_project(), + } + + def list(self, *args, **kwargs): + sections = self.get_queryset() + # Sort sections + if sections: + section_order = [s.get('id') for s in sections[0].project_type.report_sections] + sections = sorted(sections, key=lambda s: section_order.index(str(s.section_id))) + + serializer = self.get_serializer(sections, many=True) + return Response(serializer.data) + + def update(self, request, *args, **kwargs): + with transaction.atomic(): + return super().update(request, *args, **kwargs) + + +class ArchivedProjectViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [license.ProfessionalLicenseRequired] + serializer_class = ArchivedProjectSerializer + filter_backends = [SearchFilter] + search_fields = ['name'] + + def get_queryset(self): + return ArchivedProject.objects \ + .only_permitted(self.request.user) \ + .prefetch_related(Prefetch('key_parts', ArchivedProjectKeyPart.objects.select_related('user'))) + + +class ArchivedProjectKeyPartViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ArchivedProjectKeyPartPermissions, license.ProfessionalLicenseRequired] + serializer_class = ArchivedProjectKeyPartSerializer + pagination_class = None + + @functools.cache + def get_archived_project(self): + qs = ArchivedProject.objects \ + .only_permitted(self.request.user) + return get_object_or_404(qs, pk=self.kwargs['archivedproject_pk']) + + def get_queryset(self): + return self.get_archived_project().key_parts \ + .select_related('user') + + def get_serializer_class(self): + if self.action == 'public_key_encrypted_data': + return ArchivedProjectPublicKeyEncryptedKeyPartSerializer + elif self.action == 'decrypt': + return ArchivedProjectKeyPartDecryptSerializer + return super().get_serializer_class() + + def get_serializer_context(self): + return 
super().get_serializer_context() | { + 'archived_project': self.get_archived_project() + } + + @action(detail=True, url_path='public-key-encrypted-data', methods=['get']) + def public_key_encrypted_data(self, request, *args, **kwargs): + qs = self.get_object().public_key_encrypted_parts \ + .select_related('public_key') + serializer = self.get_serializer(instance=qs, many=True) + return Response(serializer.data) + + @action(detail=True, methods=['post']) + def decrypt(self, request, *args, **kwargs): + instance = self.get_object() + serializer = self.get_serializer(instance=instance, data=request.data) + serializer.is_valid(raise_exception=True) + data = serializer.save() + return Response(data) + + +class NotebookPageViewSetBase(LockableViewSetMixin, viewsets.ModelViewSet): + pagination_class = None + lookup_field = 'note_id' + serializer_class = NotebookPageSerializer + create_serializer_class = None + + def get_queryset(self): + return NotebookPage.objects \ + .select_related('parent', 'lock_info_data', 'lock_info_data__user') \ + .order_by('parent', 'order') + + def get_serializer_class(self): + if self.action == 'sort': + return NotebookPageSortListSerializer + elif self.action == 'create': + return self.create_serializer_class + return super().get_serializer_class() + + @action(detail=False, methods=['post']) + @transaction.atomic + def sort(self, request, *arg, **kwargs): + serializer = self.get_serializer(instance=list(self.get_queryset()), data=request.data) + serializer.is_valid(raise_exception=True) + serializer.save() + return Response(data=serializer.data) + + +class ProjectNotebookPageViewSet(NotebookPageViewSetBase): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectSubresourcePermissions] + create_serializer_class = ProjectNotebookPageCreateSerializer + + @functools.cache + def get_project(self): + qs = PentestProject.objects \ + .only_permitted(self.request.user) \ + .select_related('project_type') + return get_object_or_404(qs, pk=self.kwargs['project_pk']) + + def get_queryset(self): + return super().get_queryset() \ + .filter(project=self.get_project()) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'project': self.get_project() + } + + +class UserNotebookPageViewSet(NotebookPageViewSetBase): + create_serializer_class = UserNotebookPageCreateSerializer + + def get_user(self): + return self.request.user + + def get_queryset(self): + return super().get_queryset() \ + .filter(user=self.get_user()) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'user': self.get_user() + } + + +class UploadedFileViewSetMixin: + @action(detail=False, url_path='name/(?P[^/]+)') + def retrieve_by_name(self, request, *args, **kwargs): + queryset = self.filter_queryset(self.get_queryset()) + instance = get_object_or_404(queryset.filter_name(kwargs['filename'])) + self.check_object_permissions(request, instance) + return FileResponse(instance.file.open(), filename=instance.name) + + +class UploadedImageViewSet(UploadedFileViewSetMixin, viewsets.ModelViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectSubresourcePermissions] + serializer_class = UploadedImageSerializer + + def get_queryset(self): + return UploadedImage.objects \ + .only_permitted(self.request.user) \ + .filter(linked_object=self.kwargs['project_pk']) + + @functools.cache + def get_project(self): + qs = PentestProject.objects \ + .only_permitted(self.request.user) + return get_object_or_404(qs, 
pk=self.kwargs['project_pk']) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'project': self.get_project(), + } + + +class UploadedProjectFileViewSet(UploadedImageViewSet): + serializer_class = UploadedProjectFileSerilaizer + + def get_queryset(self): + return UploadedProjectFile.objects \ + .only_permitted(self.request.user) \ + .filter(linked_object=self.kwargs['project_pk']) + + +class UploadedUserNotebookImageViewSet(UploadedFileViewSetMixin, viewsets.ModelViewSet): + serializer_class = UploadedUserNotebookImageSerializer + + def get_user(self): + return self.request.user + + def get_queryset(self): + return UploadedUserNotebookImage.objects \ + .only_permitted(self.request.user) \ + .filter(linked_object=self.get_user()) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'user': self.get_user() + } + + +class UploadedAssetViewSet(UploadedFileViewSetMixin, viewsets.ModelViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [ProjectTypeSubresourcePermissions] + serializer_class = UploadedAssetSerializer + + def get_queryset(self): + return UploadedAsset.objects \ + .only_permitted(self.request.user) \ + .filter(linked_object=self.kwargs['projecttype_pk']) + + @functools.cache + def get_project_type(self): + qs = ProjectType.objects \ + .only_permitted(self.request.user) \ + .select_related('linked_project', 'linked_user') \ + .prefetch_related(Prefetch('linked_project__members', queryset=ProjectMemberInfo.objects.select_related('user'))) + return get_object_or_404(qs, pk=self.kwargs['projecttype_pk']) + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'project_type': self.get_project_type(), + } + + +class FindingTemplatePagination(CursorPagination): + def get_ordering(self, request, queryset, view): + ordering_filters = [ + filter_cls for filter_cls in getattr(view, 'filter_backends', []) + if hasattr(filter_cls, 'get_queryset_ordering') + ] + if ordering_filters: + return ordering_filters[0]().get_queryset_ordering(request, queryset, view) + return super().get_ordering() + + +class FindingTemplateOrderingFilter(OrderingFilter): + ordering_fields = ['risk', 'usage'] + + def get_queryset_ordering(self, request, queryset, view): + ordering = self.get_ordering(request, queryset, view)[0] + if ordering == 'risk': + return ['risk_score', 'created'] + elif ordering == '-risk': + return ['-risk_score', '-created'] + elif ordering == 'usage': + return ['usage_count', 'risk_score', 'created'] + elif ordering == '-usage': + return ['-usage_count', '-risk_score', '-created'] + else: + return None + + def filter_queryset(self, request, queryset, view): + ordering = self.get_queryset_ordering(request, queryset, view) + if ordering: + return queryset.order_by(*ordering) + return queryset + + def get_default_ordering(self, view): + return ['-risk'] + + +class FindingTemplateViewSet(LockableViewSetMixin, ExportImportViewSetMixin, viewsets.ModelViewSet): + queryset = FindingTemplate.objects.select_related('lock_info_data', 'lock_info_data__user').all() + serializer_class = FindingTemplateSerializer + filter_backends = [SearchFilter, DjangoFilterBackend, FindingTemplateOrderingFilter] + search_fields = ['title', 'tags', 'language'] + filterset_fields = ['language'] + pagination_class = FindingTemplatePagination + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [IsTemplateEditorOrReadOnly] + + def paginate_queryset(self, queryset): + return 
super().paginate_queryset(queryset) + + @action(detail=False) + def fielddefinition(self, request, *args, **kwargs): + return Response(data=field_definition_to_dict(FindingTemplate.field_definition)) + + def perform_export(self, instances): + return export_templates(instances) + + def perform_import(self, archive, data): + return import_templates(archive) + + +class UserPublicKeyViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet): + serializer_class = UserPublicKeySerializer + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [UserPublicKeyPermissions, license.ProfessionalLicenseRequired] + pagination_class = None + + @functools.cache + def get_user(self): + user_pk = self.kwargs['pentestuser_pk'] + if user_pk == 'self': + return self.request.user + + qs = PentestUser.objects.all() + return get_object_or_404(qs, pk=user_pk) + + def get_queryset(self): + return self.get_user().public_keys.all() + + def get_serializer_class(self): + if self.action == 'register_begin': + return UserPublicKeyRegisterBeginSerializer + return super().get_serializer_class() + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'user': self.get_user() + } + + @action(detail=False, url_path='register/begin', methods=['post']) + def register_begin(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + instance = serializer.save() + + # Verify that users can decrypt with the given public key before registering + test_data = 'key-verification-' + str(uuid4()) + '\n' + verification_encrypted = instance.encrypt(test_data.encode()) + self.request.session['public_key_register'] = { + 'instance': model_to_dict(instance), + 'verification': test_data, + } + return Response(data={ + 'status': 'verify-key', + 'public_key_info': instance.public_key_info, + 'verification': verification_encrypted, + }) + + @action(detail=False, url_path='register/complete', methods=['post']) + def register_complete(self, request, *args, **kwargs): + public_key_register_state = request.session.get('public_key_register') + if not public_key_register_state: + raise APIBadRequestError('No public key registration in progress') + if public_key_register_state['verification'].strip() != request.data.get('verification', '').strip(): + raise ValidationError('Invalid verification code') + + instance = UserPublicKey(**public_key_register_state['instance']) + instance.user = self.get_user() + instance.save() + serializer = self.get_serializer(instance=instance) + return Response(data=serializer.data, status=status.HTTP_201_CREATED) + + def perform_destroy(self, instance): + try: + instance.delete() + except ProtectedError: + raise ValidationError( + detail='Cannot delete this public key because some archives are encrypted with it. 
' + + 'You can disable it to not be used for archiving in the future.') diff --git a/api/src/reportcreator_api/tasks/__init__.py b/api/src/reportcreator_api/tasks/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/tasks/admin.py b/api/src/reportcreator_api/tasks/admin.py new file mode 100644 index 0000000..cab52ff --- /dev/null +++ b/api/src/reportcreator_api/tasks/admin.py @@ -0,0 +1,10 @@ +from django.contrib import admin + +from reportcreator_api.utils.admin import BaseAdmin +from reportcreator_api.tasks.models import PeriodicTask + + +@admin.register(PeriodicTask) +class NotificationSpecAdmin(BaseAdmin): + pass + diff --git a/api/src/reportcreator_api/tasks/apps.py b/api/src/reportcreator_api/tasks/apps.py new file mode 100644 index 0000000..961e733 --- /dev/null +++ b/api/src/reportcreator_api/tasks/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class NotificationsConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'reportcreator_api.tasks' diff --git a/api/src/reportcreator_api/tasks/migrations/0001_initial.py b/api/src/reportcreator_api/tasks/migrations/0001_initial.py new file mode 100644 index 0000000..48e861c --- /dev/null +++ b/api/src/reportcreator_api/tasks/migrations/0001_initial.py @@ -0,0 +1,33 @@ +# Generated by Django 4.1.5 on 2023-01-26 08:29 + +from django.db import migrations, models +import django.utils.timezone +import reportcreator_api.utils.models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='PeriodicTask', + fields=[ + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('id', models.CharField(max_length=255, primary_key=True, serialize=False)), + ('status', models.CharField(choices=[('running', 'Running'), ('success', 'Success'), ('failed', 'Failed')], default='running', max_length=10)), + ('started', models.DateTimeField(default=django.utils.timezone.now)), + ('completed', models.DateTimeField(blank=True, null=True)), + ('last_success', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + ] diff --git a/api/src/reportcreator_api/tasks/migrations/__init__.py b/api/src/reportcreator_api/tasks/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/tasks/models.py b/api/src/reportcreator_api/tasks/models.py new file mode 100644 index 0000000..0926c92 --- /dev/null +++ b/api/src/reportcreator_api/tasks/models.py @@ -0,0 +1,21 @@ +from django.db import models +from django.utils import timezone + +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.tasks import querysets + + +class TaskStatus(models.TextChoices): + RUNNING = 'running', 'Running' + SUCCESS = 'success', 'Success' + FAILED = 'failed', 'Failed' + + +class PeriodicTask(BaseModel): + id = models.CharField(max_length=255, primary_key=True) + status = models.CharField(max_length=10, choices=TaskStatus.choices, default=TaskStatus.RUNNING) + started = models.DateTimeField(default=timezone.now) + completed = models.DateTimeField(null=True, blank=True) + last_success = models.DateTimeField(null=True, blank=True) + + objects = querysets.PeriodicTaskManager() diff --git a/api/src/reportcreator_api/tasks/querysets.py 
b/api/src/reportcreator_api/tasks/querysets.py new file mode 100644 index 0000000..5b0b30c --- /dev/null +++ b/api/src/reportcreator_api/tasks/querysets.py @@ -0,0 +1,79 @@ +import logging +import elasticapm +from asgiref.sync import sync_to_async, iscoroutinefunction +from datetime import timedelta +from django.db import models, IntegrityError +from django.db.models.functions import Rank +from django.conf import settings +from django.utils import timezone +from django.utils.module_loading import import_string + +log = logging.getLogger(__name__) + + +class PeriodicTaskQuerySet(models.QuerySet): + def get_pending_tasks(self): + from reportcreator_api.tasks.models import TaskStatus + pending_tasks = {t['id']: t.copy() for t in settings.PERIODIC_TASKS} + for t in self.filter(id__in=pending_tasks.keys()): + pending_tasks[t.id]['model'] = t + # Remove non-pending tasks + if (t.status == TaskStatus.RUNNING and t.started > timezone.now() - timedelta(minutes=10)) or \ + (t.status == TaskStatus.FAILED and t.started > timezone.now() - timedelta(minutes=10)) or \ + (t.status == TaskStatus.SUCCESS and t.started > timezone.now() - pending_tasks[t.id]['schedule']): + del pending_tasks[t.id] + return pending_tasks.values() + + +class PeriodicTaskManager(models.Manager.from_queryset(PeriodicTaskQuerySet)): + async def run_task(self, task_info): + from reportcreator_api.tasks.models import PeriodicTask, TaskStatus + + # Lock task + if task_info.get('model'): + started = timezone.now() + res = await PeriodicTask.objects \ + .filter(id=task_info['id']) \ + .filter(status=task_info['model'].status) \ + .filter(started=task_info['model'].started) \ + .filter(completed=task_info['model'].completed) \ + .aupdate(status=TaskStatus.RUNNING, started=started, completed=None) + if res != 1: + return + task_info['model'].status = TaskStatus.RUNNING + task_info['model'].started = started + task_info['model'].completed = None + else: + try: + task_info['model'] = await PeriodicTask.objects.acreate( + id=task_info['id'], + status=TaskStatus.RUNNING, + started=timezone.now(), + completed=None + ) + except IntegrityError: + return + + # Execute task + log.info(f'Starting periodic task "{task_info["id"]}"') + try: + task_fn = import_string(task_info['task']) + async with elasticapm.async_capture_span(task_info['id']): + if iscoroutinefunction(task_fn): + await task_fn(task_info) + else: + await sync_to_async(task_fn)(task_info) + task_info['model'].status = TaskStatus.SUCCESS + task_info['model'].last_success = timezone.now() + task_info['model'].completed = task_info['model'].last_success + except Exception: + logging.exception(f'Error while running periodic task "{task_info["id"]}"') + task_info['model'].status = TaskStatus.FAILED + task_info['model'].completed = timezone.now() + log.info(f'Completed periodic task "{task_info["id"]}" with status "{task_info["model"].status}"') + + await task_info['model'].asave() + + async def run_all_pending_tasks(self): + for t in await sync_to_async(self.get_pending_tasks)(): + await self.run_task(t) diff --git a/api/src/reportcreator_api/tasks/rendering/__init__.py b/api/src/reportcreator_api/tasks/rendering/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/tasks/rendering/celery_worker.py b/api/src/reportcreator_api/tasks/rendering/celery_worker.py new file mode 100644 index 0000000..f7e5ce3 --- /dev/null +++ b/api/src/reportcreator_api/tasks/rendering/celery_worker.py @@ -0,0 +1,85 @@ +import gc +from pathlib import Path +from celery 
import Celery, signals +from celery.backends.base import Backend +from celery.backends.rpc import RPCBackend, ResultConsumer +from kombu import pools +from django.conf import settings + + +class SecureWorkerFixup: + """ + Restart celery worker container after every task. + """ + + def __init__(self, app: Celery) -> None: + self.app = app + if settings.CELERY_SECURE_WORKER: + self.install() + + def install(self): + signals.task_prerun.connect(self.on_task_prerun) + + def on_task_prerun(self, task, *args, **kwargs): + # Restart worker + self.app.control.shutdown(destination=[task.request.hostname]) + + # Close broker connection pool + self.app.pool.force_close_all() + pools.reset() + + # Remove credentials + settings.CELERY_BROKER_URL = None + if settings.CELERY_BROKER_URL_FILE: + Path(settings.CELERY_BROKER_URL_FILE).unlink() + gc.collect() + + +class CustomRPCResultConsumer(ResultConsumer): + def start(self, initial_task_id, no_ack=True, **kwargs): + self._connection = self.backend.connection() + initial_queue = self._create_binding(initial_task_id) + self._consumer = self.Consumer( + channel=self._connection.default_channel, + queues=[initial_queue], + callbacks=[self.on_state_change], + no_ack=no_ack, + accept=self.accept + ) + self._consumer.consume() + + +class CustomRPCBackend(RPCBackend): + """ + Celery RPC result backend which uses a separate amqp connection for sending results. + """ + + ResultConsumer = CustomRPCResultConsumer + + def connection(self): + return self.app.amqp.Connection( + self.url.replace('reportcreator_api.tasks.rendering.celery_worker:CustomRPCBackend', 'pyamqp'), + connect_timeout=self.app.conf.broker_connection_timeout + ) + + def store_result(self, task_id, result, state, traceback=None, request=None, **kwargs): + routing_key, correlation_id = self.destination_for(task_id, request) + if not routing_key: + return + + with self.connection() as conn: + with self.app.amqp.Producer(conn) as producer: + producer.publish( + self._to_result(task_id, state, result, traceback, request), + exchange=self.exchange, + routing_key=routing_key, + correlation_id=correlation_id, + serializer=self.serializer, + retry=True, retry_policy=self.retry_policy, + declare=self.on_reply_declare(task_id), + delivery_mode=self.delivery_mode, + ) + return result + + def as_uri(self, include_password=True): + return Backend.as_uri(self, include_password) diff --git a/api/src/reportcreator_api/tasks/rendering/entry.py b/api/src/reportcreator_api/tasks/rendering/entry.py new file mode 100644 index 0000000..d25e435 --- /dev/null +++ b/api/src/reportcreator_api/tasks/rendering/entry.py @@ -0,0 +1,174 @@ +import dataclasses +import logging +import uuid +import asyncio +import elasticapm +from asgiref.sync import sync_to_async +from types import NoneType +from typing import Any, Optional, Union +from base64 import b64encode, b64decode + +from reportcreator_api.tasks.rendering import tasks +from reportcreator_api.pentests import cvss +from reportcreator_api.pentests.customfields.types import FieldDataType, FieldDefinition, EnumChoice +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure +from reportcreator_api.utils.error_messages import ErrorMessage, MessageLevel, MessageLocationInfo, MessageLocationType +from reportcreator_api.pentests.models import PentestProject, ProjectType, ProjectMemberInfo +from reportcreator_api.utils.error_messages import ErrorMessage +from reportcreator_api.utils.utils import copy_keys, get_key_or_attr +from 
reportcreator_api.utils.logging import log_timing + + +log = logging.getLogger(__name__) + + +class PdfRenderingError(Exception): + def __init__(self, messages) -> None: + super().__init__(messages) + self.messages = messages + + +def format_template_field_object(value: dict, definition: dict[str, FieldDefinition], imported_members: Optional[list[dict]] = None, require_id=False): + out = value | ensure_defined_structure(value=value, definition=definition) + for k, d in (definition or {}).items(): + out[k] = format_template_field(value=out.get(k), definition=d, imported_members=imported_members) + + if require_id and 'id' not in out: + out['id'] = str(uuid.uuid4()) + return out + + +def format_template_field_user(value: Union[ProjectMemberInfo, str, uuid.UUID, None], imported_members: Optional[list[dict]] = None): + def format_user(u: Union[ProjectMemberInfo, dict, None]): + if not u: + return None + return copy_keys( + u.user if isinstance(u, ProjectMemberInfo) else u, + ['id', 'name', 'title_before', 'first_name', 'middle_name', 'last_name', 'title_after', 'email', 'phone', 'mobile']) | \ + {'roles': list(set(filter(None, get_key_or_attr(u, 'roles', []))))} + + if isinstance(value, (ProjectMemberInfo, NoneType)): + return format_user(value) + elif u := next(filter(lambda i: str(i.get('id')) == str(value), imported_members or []), None): + return format_user(u) + else: + return format_user(ProjectMemberInfo.objects.filter(id=value).first()) + + +def format_template_field(value: Any, definition: FieldDefinition, imported_members: Optional[list[dict]] = None): + value_type = definition.type + if value_type == FieldDataType.ENUM: + return dataclasses.asdict(next(filter(lambda c: c.value == value, definition.choices), EnumChoice(value='', label=''))) + elif value_type == FieldDataType.CVSS: + score = cvss.calculate_score(value) + return { + 'vector': value, + 'score': str(round(score, 2)), + 'level': cvss.level_from_score(score).value, + 'level_number': cvss.level_number_from_score(score) + } + elif value_type == FieldDataType.USER: + return format_template_field_user(value, imported_members=imported_members) + elif value_type == FieldDataType.LIST: + return [format_template_field(value=e, definition=definition.items, imported_members=imported_members) for e in value] + elif value_type == FieldDataType.OBJECT: + return format_template_field_object(value=value, definition=definition.properties, imported_members=imported_members) + else: + return value + + +def format_template_data(data: dict, project_type: ProjectType, imported_members: Optional[list[dict]] = None): + data['report'] = format_template_field_object( + value=ensure_defined_structure( + value=data.get('report', {}), + definition=project_type.report_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT), + definition=project_type.report_fields_obj, + imported_members=imported_members, + require_id=True) + data['findings'] = sorted([ + format_template_field_object( + value=(f if isinstance(f, dict) else {}) | ensure_defined_structure( + value=f, + definition=project_type.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT), + definition=project_type.finding_fields_obj, + imported_members=imported_members, + require_id=True) + for f in data.get('findings', [])], + key=lambda f: (-float(f.get('cvss', {}).get('score', 0)), f.get('created'), f.get('id'))) + data['pentesters'] = data.get('pentesters', []) + (imported_members or []) + return data + + +async def get_celery_result_async(task): 
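+ # Polling task.ready() with an async sleep (instead of calling the blocking task.get()
+ # directly) keeps the event loop free while the Celery worker produces the result;
+ # task.get() is only called once the result is known to be available.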
+ while not task.ready(): + await asyncio.sleep(0.2) + return task.get() + + +@elasticapm.async_capture_span() +async def render_pdf_task(project_type: ProjectType, report_template: str, report_styles: str, data: dict, password: Optional[str] = None, project: Optional[PentestProject] = None): + task = await sync_to_async(tasks.render_pdf_task.delay)( + template=report_template, + styles=report_styles, + data=data, + language=project.language if project else project_type.language, + password=password, + resources= + {'/assets/name/' + a.name: b64encode(a.file.read()).decode() async for a in project_type.assets.all()} | + ({'/images/name/' + i.name: b64encode(i.file.read()).decode() async for i in project.images.all()} if project else {}) + ) + res = await get_celery_result_async(task) + + if not res.get('pdf'): + raise PdfRenderingError([ErrorMessage( + level=MessageLevel(m.get('level')), + location=MessageLocationInfo(type=MessageLocationType.DESIGN, id=str(project_type.id), name=project_type.name), + message=m.get('message'), + details=m.get('details') + ) for m in res.get('messages', [])]) + return b64decode(res.get('pdf')) + + +async def render_pdf(project: PentestProject, project_type: Optional[ProjectType] = None, report_template: Optional[str] = None, report_styles: Optional[str] = None, password: Optional[str] = None) -> bytes: + if not project_type: + project_type = project.project_type + if not report_template: + report_template = project_type.report_template + if not report_styles: + report_styles = project_type.report_styles + + data = { + 'report': { + 'id': str(project.id), + **project.data, + }, + 'findings': [{ + 'id': str(f.finding_id), + 'created': str(f.created), + **f.data, + } async for f in project.findings.all()], + 'pentesters': [await sync_to_async(format_template_field_user)(u) async for u in project.members.all()], + } + data = await sync_to_async(format_template_data)(data=data, project_type=project_type, imported_members=project.imported_members) + return await render_pdf_task( + project=project, + project_type=project_type, + report_template=report_template, + report_styles=report_styles, + data=data, + password=password + ) + +async def render_pdf_preview(project_type: ProjectType, report_template: str, report_styles: str, report_preview_data: dict) -> bytes: + preview_data = report_preview_data.copy() + data = await sync_to_async(format_template_data)(data=preview_data, project_type=project_type) + + return await render_pdf_task( + project_type=project_type, + report_template=report_template, + report_styles=report_styles, + data=data + ) + diff --git a/api/src/reportcreator_api/tasks/rendering/render.py b/api/src/reportcreator_api/tasks/rendering/render.py new file mode 100644 index 0000000..6b69596 --- /dev/null +++ b/api/src/reportcreator_api/tasks/rendering/render.py @@ -0,0 +1,191 @@ +import json +import logging +from playwright.sync_api import sync_playwright +from typing import Optional +from base64 import b64decode +from html import escape as html_escape +from io import BytesIO +from pikepdf import Pdf, Encryption +from contextlib import contextmanager +from weasyprint import HTML, CSS, default_url_fetcher +from weasyprint.text.fonts import FontConfiguration +from weasyprint.urls import URLFetchingError +from django.core.serializers.json import DjangoJSONEncoder + +from django.conf import settings +from reportcreator_api.utils.logging import log_timing + + +@contextmanager +def get_page(): + with sync_playwright() as playwright: + with 
playwright.chromium.launch( + executable_path=settings.CHROMIUM_EXECUTABLE, + args=['--single-process'], + headless=True, + chromium_sandbox=False, + handle_sigint=False, + handle_sigterm=False, + handle_sighup=False + ) as browser: + with browser.new_context() as context: + yield context.new_page() + + +def get_render_script(): + return (settings.PDF_RENDER_SCRIPT_PATH).read_text() + + +@log_timing +def render_to_html(template: str, data: dict, language: str) -> tuple[Optional[str], list[dict]]: + messages = [] + html = None + + try: + with get_page() as page: + console_output = [] + page.on('console', lambda l: console_output.append(l)) + page.on('pageerror', lambda exc: messages.append({'level': 'error', 'message': 'Uncaught error during template rendering', 'details': str(exc)})) + page.on('requestfailed', lambda request: messages.append({'level': 'error', 'message': 'Request failed', 'details': f'Request to URL {request.url} failed: {request.failure.error_text}'})) + page.set_content(f""" + + + + + {html_escape(data.get('title', ''))} + + + + """) + + # set global window variables + page.evaluate(f"""() => {{ + window.REPORT_TEMPLATE = {json.dumps(template, cls=DjangoJSONEncoder)}; + window.REPORT_DATA = {json.dumps(data, cls=DjangoJSONEncoder)}; + }}""") + + page.add_script_tag(content=get_render_script()) + + # Wait for template to finish rendering + page.wait_for_function("""window.RENDERING_COMPLETED === true"""); + + # Format messages + for m in console_output: + msg = { + 'level': m.type, + 'message': m.text, + 'details': None + } + if len(m.args) == 2 and (error_data := m.args[1].json_value()) and 'message' in error_data: + msg |= { + 'message': error_data['message'], + 'details': error_data.get('details'), + } + if msg['level'] in ['error', 'warning', 'info']: + messages.append(msg) + + if not any(map(lambda m: m['level'] == 'error', messages)): + # Remove script tag from HTML output + page.evaluate("""() => document.head.querySelectorAll('script').forEach(s => s.remove())""") + # Get rendered HTML + html = page.content() + except Exception as ex: + messages.append({ + 'level': 'error', + 'message': 'Error rendering HTML template', + 'details': None, + }) + + if messages: + logging.info(f'Chromium messages: {messages}') + + return html, messages + + +def weasyprint_strip_pdf_metadata(doc, pdf): + # remove Producer meta-data info from PDF + del pdf.info['Producer'] + + +@log_timing +def render_to_pdf(html_content: str, css_styles: str, resources: dict[str, str]) -> tuple[Optional[bytes], list[dict]]: + messages = [] + + def weasyprint_url_fetcher(url, timeout=10, ssl_context=None): + # allow data URLs + if url.startswith('data:'): + return default_url_fetcher(url=url, timeout=timeout, ssl_context=ssl_context) + # allow loading from the resource list + elif url in resources: + return { + 'filename': url.split('/')[-1], + 'file_obj': BytesIO(b64decode(resources[url])), + } + elif url.startswith('/'): + messages.append({ + 'level': 'error', + 'message': 'Resource not found', + 'details': f'Could not find resource for URL "{url}". Check if the URL is correct and the resource exists on the server.', + }) + raise URLFetchingError('Resource not found') + else: + # block all external requests + messages.append({ + 'level': 'error', + 'message': 'Blocked request to external URL', + 'details': f'Block request to URL "{url}". 
Requests to external systems are forbidden for security reasons.\nUpload this resource as assset and include it via its asset URL.', + }) + raise URLFetchingError('External requests not allowed') + + font_config = FontConfiguration() + html = HTML(string=html_content, base_url='reportcreator://', url_fetcher=weasyprint_url_fetcher) + css = CSS(string=css_styles, font_config=font_config, base_url='reportcreator://', url_fetcher=weasyprint_url_fetcher) + rendered = html.render(stylesheets=[css], font_config=font_config, optimize_size=[], presentational_hints=True) + + res = None + if not any(map(lambda m: m['level'] == 'error', messages)): + res = rendered.write_pdf(finisher=weasyprint_strip_pdf_metadata) + return res, messages + + +@log_timing +def encrypt_pdf(pdf_data: bytes, password: Optional[str]) -> bytes: + if not password: + return pdf_data + + with Pdf.open(BytesIO(pdf_data)) as pdf: + out = BytesIO() + # Encrypt PDF with AES-256 + pdf.save( + filename_or_stream=out, + encryption=Encryption(owner=password, user=password, aes=True, R=6) if password else False, + compress_streams=True + ) + return out.getvalue() + + +def render_pdf(template: str, styles: str, data: dict, resources: dict, language: str, password: Optional[str] = None) -> tuple[Optional[bytes], list[dict]]: + msgs = [] + html, html_msgs = render_to_html( + template=template, + data=data, + language=language, + ) + msgs += html_msgs + if html is None: + return None, msgs + + pdf, pdf_msgs = render_to_pdf( + html_content=html, + css_styles=styles, + resources=resources, + ) + msgs += pdf_msgs + if pdf is None: + return None, msgs + + pdf_enc = encrypt_pdf( + pdf_data=pdf, + password=password + ) + return pdf_enc, msgs diff --git a/api/src/reportcreator_api/tasks/rendering/tasks.py b/api/src/reportcreator_api/tasks/rendering/tasks.py new file mode 100644 index 0000000..e6c9db0 --- /dev/null +++ b/api/src/reportcreator_api/tasks/rendering/tasks.py @@ -0,0 +1,15 @@ +from base64 import b64encode +from celery import shared_task + +from reportcreator_api.utils.logging import log_timing +from reportcreator_api.tasks.rendering import render + + +@shared_task(name='reportcreator.render_pdf', expires=3 * 60, time_limit=3 * 60) +@log_timing +def render_pdf_task(*args, **kwargs) -> dict: + pdf, msgs = render.render_pdf(*args, **kwargs) + return { + 'pdf': b64encode(pdf).decode() if pdf else None, + 'messages': msgs, + } diff --git a/api/src/reportcreator_api/tests/__init__.py b/api/src/reportcreator_api/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/tests/mock.py b/api/src/reportcreator_api/tests/mock.py new file mode 100644 index 0000000..1e6b0b0 --- /dev/null +++ b/api/src/reportcreator_api/tests/mock.py @@ -0,0 +1,248 @@ +import random +from datetime import datetime, timedelta +from unittest import mock +from django.utils import timezone +from rest_framework.test import APIClient +from reportcreator_api.archive import crypto + +from reportcreator_api.pentests.customfields.utils import HandleUndefinedFieldsOptions, ensure_defined_structure +from reportcreator_api.pentests.models import FindingTemplate, NotebookPage, PentestFinding, PentestProject, ProjectType, UploadedAsset, UploadedImage, \ + ProjectMemberInfo, ProjectMemberRole, UploadedProjectFile, UploadedUserNotebookImage, Language, UserPublicKey +from reportcreator_api.pentests.customfields.predefined_fields import finding_field_order_default, finding_fields_default, report_fields_default, \ + report_sections_default +from 
reportcreator_api.pentests.models.archive import ArchivedProject, ArchivedProjectKeyPart, ArchivedProjectPublicKeyEncryptedKeyPart +from reportcreator_api.users.models import PentestUser, MFAMethod +from django.core.files.uploadedfile import SimpleUploadedFile + + +def create_png_file() -> bytes: + # 1x1 pixel PNG file + # Source: https://commons.wikimedia.org/wiki/File:1x1.png + return b'\x89PNG\r\n\x1a\n\x00\x00\x00\r' + \ + b'IHDR\x00\x00\x00\x01\x00\x00\x00\x01\x01\x03\x00\x00\x00%\xdbV\xca\x00\x00\x00\x03' + \ + b'PLTE\x00\x00\x00\xa7z=\xda\x00\x00\x00\x01tRNS\x00@\xe6\xd8f\x00\x00\x00\n' + \ + b'IDAT\x08\xd7c`\x00\x00\x00\x02\x00\x01\xe2!\xbc3\x00\x00\x00\x00IEND\xaeB`\x82' + + +def create_user(mfa=False, public_key=False, notes_kwargs=None, images_kwargs=None, **kwargs) -> PentestUser: + username = f'user{random.randint(0, 100000)}' + user = PentestUser.objects.create_user(**{ + 'username': username, + 'password': None, + 'email': username + '@example.com', + 'first_name': 'Herbert', + 'last_name': 'Testinger', + } | kwargs) + if mfa: + MFAMethod.objects.create_totp(user=user, is_primary=True) + MFAMethod.objects.create_backup(user=user) + if public_key: + create_public_key(user=user) + + for note_kwargs in notes_kwargs if notes_kwargs is not None else [{}]: + create_notebookpage(user=user, **note_kwargs) + for idx, image_kwargs in enumerate(images_kwargs if images_kwargs is not None else [{}]): + UploadedUserNotebookImage.objects.create(linked_object=user, **{ + 'name': f'file{idx}.png', + 'file': SimpleUploadedFile(name=f'file{idx}.png', content=create_png_file()) + } | image_kwargs) + + return user + + +def create_template(**kwargs) -> FindingTemplate: + data = { + 'title': f'Finding Template #{random.randint(1, 100000)}', + 'description': 'Template Description', + 'recommendation': 'Template Recommendation', + 'undefined_field': 'test', + } | kwargs.pop('data', {}) + template = FindingTemplate.objects.create(**{ + 'language': Language.ENGLISH, + 'tags': ['web', 'dev'], + } | kwargs) + template.update_data(data) + template.save() + return template + + +def create_project_type(**kwargs) -> ProjectType: + additional_fields = { + 'field_string': {'type': 'string', 'label': 'String Field', 'default': 'test'}, + 'field_markdown': {'type': 'markdown', 'label': 'Markdown Field', 'default': '# test\nmarkdown'}, + 'field_cvss': {'type': 'cvss', 'label': 'CVSS Field', 'default': 'n/a'}, + 'field_date': {'type': 'date', 'label': 'Date Field', 'default': '2022-01-01'}, + 'field_int': {'type': 'number', 'label': 'Number Field', 'default': 10}, + 'field_bool': {'type': 'boolean', 'label': 'Boolean Field', 'default': False}, + 'field_enum': {'type': 'enum', 'label': 'Enum Field', 'choices': [{'value': 'enum1', 'label': 'Enum Value 1'}, {'value': 'enum2', 'label': 'Enum Value 2'}], 'default': 'enum2'}, + 'field_combobox': {'type': 'combobox', 'label': 'Combobox Field', 'suggestions': ['value 1', 'value 2'], 'default': 'value1'}, + 'field_user': {'type': 'user', 'label': 'User Field'}, + 'field_object': {'type': 'object', 'label': 'Nested Object', 'properties': {'nested1': {'type': 'string', 'label': 'Nested Field'}}}, + 'field_list': {'type': 'list', 'label': 'List Field', 'items': {'type': 'string'}}, + 'field_list_objects': {'type': 'list', 'label': 'List of nested objects', 'items': {'type': 'object', 'properties': {'nested1': {'type': 'string', 'label': 'Nested object field', 'default': None}}}}, + } + project_type = ProjectType.objects.create(**{ + 'name': f'Project Type #{random.randint(1, 
100000)}', + 'language': Language.ENGLISH, + 'report_fields': report_fields_default() | additional_fields, + 'report_sections': report_sections_default(), + 'finding_fields': finding_fields_default() | additional_fields, + 'finding_field_order': finding_field_order_default(), + 'report_template': '''

<h1>{{ report.title }}</h1>
<div v-for="finding in findings">
<h2>{{ finding.title }}</h2>
</div>

''', + 'report_styles': '''@page { size: A4 portrait; } h1 { font-size: 3em; font-weight: bold; }''', + 'report_preview_data': { + 'report': {'title': 'Demo Report', 'field_string': 'test', 'field_int': 5, 'undefined_field': 'test'}, + 'findings': [{'title': 'Demo finding', 'undefined_field': 'test'}] + } + } | kwargs) + UploadedAsset.objects.create(linked_object=project_type, name='file1.png', file=SimpleUploadedFile(name='file1.png', content=b'file1')) + UploadedAsset.objects.create(linked_object=project_type, name='file2.png', file=SimpleUploadedFile(name='file2.png', content=b'file2')) + return project_type + + +def create_finding(project, template=None, **kwargs) -> PentestFinding: + data = ensure_defined_structure( + value={ + 'title': f'Finding #{random.randint(0, 100000)}', + 'cvss': 'CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H', + 'description': 'Finding Description', + 'recommendation': 'Finding Recommendation', + 'undefined_field': 'test', + } | (template.data if template else {}), + definition=project.project_type.finding_fields_obj, + handle_undefined=HandleUndefinedFieldsOptions.FILL_DEFAULT, + include_undefined=True, + ) | kwargs.pop('data', {}) + finding = PentestFinding.objects.create(**{ + 'project': project, + 'assignee': None, + 'template_id': template.id if template else None, + } | kwargs) + finding.update_data(data) + finding.save() + return finding + + +def create_notebookpage(**kwargs) -> NotebookPage: + return NotebookPage.objects.create(**{ + 'title': f'Note #{random.randint(0, 100000)}', + 'text': 'Note text', + 'checked': random.choice([None, True, False]), + 'icon_emoji': random.choice([None, '🦖']), + 'status_emoji': random.choice([None, '✔️', '🤡']), + } | kwargs) + + +def create_project(project_type=None, members=[], report_data={}, findings_kwargs=None, notes_kwargs=None, images_kwargs=None, files_kwargs=None, **kwargs) -> PentestProject: + project_type = project_type or create_project_type() + project = PentestProject.objects.create(**{ + 'project_type': project_type, + 'name': f'Pentest Project #{random.randint(1, 100000)}', + 'language': Language.ENGLISH, + } | kwargs) + project.update_data({ + 'title': 'Report title', + 'undefined_field': 'test', + } | report_data) + project.save() + + member_infos = [] + for m in members: + if isinstance(m, PentestUser): + member_infos.append(ProjectMemberInfo(project=project, user=m, roles=[ProjectMemberRole.default_roles])) + elif isinstance(m, ProjectMemberInfo): + m.project = project + member_infos.append(m) + else: + raise ValueError('Unsupported member type') + ProjectMemberInfo.objects.bulk_create(member_infos) + + for finding_kwargs in findings_kwargs if findings_kwargs is not None else [{}] * 3: + create_finding(project=project, **finding_kwargs) + + for note_kwargs in notes_kwargs if notes_kwargs is not None else [{}] * 3: + create_notebookpage(project=project, **note_kwargs) + + for idx, image_kwargs in enumerate(images_kwargs if images_kwargs is not None else [{}] * 2): + UploadedImage.objects.create(linked_object=project, **{ + 'name': f'file{idx}.png', + 'file': SimpleUploadedFile(name=f'file{idx}.png', content=create_png_file()) + } | image_kwargs) + for idx, file_kwargs in enumerate(files_kwargs if files_kwargs is not None else [{}] * 2): + UploadedProjectFile.objects.create(linked_object=project, **{ + 'name': f'file{idx}.pdf', + 'file': SimpleUploadedFile(name=f'file{idx}.pdf', content=f'%PDF-1.3{idx}'.encode()) + } | file_kwargs) + + return project + + +def create_public_key(**kwargs): + dummy_data = 
{ + 'name': f'Public key #{random.randint(1, 100000)}', + } + if 'public_key' not in kwargs: + dummy_data |= { + 'public_key': + '-----BEGIN PGP PUBLIC KEY BLOCK-----\n\n' + + 'mDMEZBryexYJKwYBBAHaRw8BAQdAI2A6jJCXSGP10s2H1duX22saF2lX4CtGzX+H\n' + + 'xm4nN8W0LEF1dG9nZW5lcmF0ZWQgS2V5IDx1bnNwZWNpZmllZEA3MmNmMGYzYTc4\n' + + 'NmQ+iJAEExYIADgWIQTC5xEj3lvM80ruTt39spmRS6kHgwUCZBryewIbIwULCQgH\n' + + 'AgYVCgkICwIEFgIDAQIeAQIXgAAKCRD9spmRS6kHgxspAQDrxnxj2eRaubEX547n\n' + + 'w+wE1PJohJqLoWERuCz2UuJLRwEA44NZVlPHdkwUXeP7otuOeA0ZCzOQIc+/60Pr\n' + + 'aeqVEQi4cwRkGvJ7EgUrgQQAIgMDBHlYyMT98UVGIaFUu2p/rkbOGnZ1k5d/KtMx\n' + + '8TxqyU1cpdIzTvOVD4ykunTzsWsi60ERcNg6vDuHcDCapHYmvuk/+g49NQFNutRX\n' + + 'fnNxVj091cH3ioJCgQ1wbYgoW0qfCQMBCQiIeAQYFggAIBYhBMLnESPeW8zzSu5O\n' + + '3f2ymZFLqQeDBQJkGvJ7AhsMAAoJEP2ymZFLqQeDrOUBAKnrakgp/dYWsMIHwiAg\n' + + 'Nq1F1YAX92oNteAVpTRNkwyIAQC68j1ytjpdoEbYlAPfQtKljjDSDONLxmmZWPxP\n' + + 'Ya8sAg==\n' + + '=jbm4\n' + + '-----END PGP PUBLIC KEY BLOCK-----\n', + 'public_key_info': { + 'cap': 'scaESCA', + 'algo': '22', + 'type': 'pub', + 'curve': 'ed25519', + 'subkey_info': { + 'C3B01D1054571D18': { + 'cap': 'e', + 'algo': '18', + 'type': 'sub', + 'curve': 'nistp384', + } + } + } + } + + return UserPublicKey.objects.create(**dummy_data | kwargs) + + +def create_archived_project(project=None, **kwargs): + name = project.name if project else f'Archive #{random.randint(1, 100000)}' + users = [m.user for m in project.members.all()] if project else [create_user(public_key=True)] + + archive = ArchivedProject.objects.create(name=name, threshold=1, file=SimpleUploadedFile('archive.tar.gz', crypto.MAGIC + b'dummy-data')) + key_parts = [] + encrypted_key_parts = [] + for u in users: + key_parts.append(ArchivedProjectKeyPart(archived_project=archive, user=u, encrypted_key_part=b'dummy-data')) + for pk in u.public_keys.all(): + encrypted_key_parts.append(ArchivedProjectPublicKeyEncryptedKeyPart(key_part=key_parts[-1], public_key=pk, encrypted_data='dummy-data')) + + if not encrypted_key_parts: + raise ValueError('No public keys set for users') + ArchivedProjectKeyPart.objects.bulk_create(key_parts) + ArchivedProjectPublicKeyEncryptedKeyPart.objects.bulk_create(encrypted_key_parts) + return archive + + +def mock_time(before=None, after=None): + return mock.patch('django.utils.timezone.now', + lambda: datetime.now(tz=timezone.get_current_timezone()) - (before or timedelta()) + (after or timedelta())) + + +def api_client(user=None): + client = APIClient() + client.force_authenticate(user) + return client + diff --git a/api/src/reportcreator_api/tests/test_api.py b/api/src/reportcreator_api/tests/test_api.py new file mode 100644 index 0000000..6b0cccf --- /dev/null +++ b/api/src/reportcreator_api/tests/test_api.py @@ -0,0 +1,370 @@ +import pytest +from django.urls import reverse +from django.core.files.base import ContentFile +from django.http import FileResponse, StreamingHttpResponse +from django.test import override_settings +from django.utils import timezone +from rest_framework.test import APIClient +from reportcreator_api.users.models import AuthIdentity +from reportcreator_api.pentests.models import ProjectType, FindingTemplate, PentestProject, ProjectTypeScope, SourceEnum, UploadedUserNotebookImage +from reportcreator_api.notifications.models import NotificationSpec +from reportcreator_api.tests.mock import create_archived_project, create_user, create_project, create_project_type, create_template, create_png_file +from reportcreator_api.archive.import_export import export_project_types, 
export_projects, export_templates + + +def export_archive(obj): + if isinstance(obj, ProjectType): + exp = export_project_types([obj]) + elif isinstance(obj, FindingTemplate): + exp = export_templates([obj]) + elif isinstance(obj, PentestProject): + exp = export_projects([obj]) + return ContentFile(content=b''.join(exp), name='export.tar.gz') + + +def viewset_urls(basename, get_kwargs, create_data={}, list=False, retrieve=False, create=False, update=False, update_partial=False, destroy=False, lock=False, unlock=False): + list_urlname = basename + '-list' + detail_urlname = basename + '-detail' + + out = [] + if list: + out.append((basename + ' list', lambda s, c: c.get(reverse(list_urlname, kwargs=get_kwargs(s, False))))) + if retrieve: + out.append((basename + ' retrieve', lambda s, c: c.get(reverse(detail_urlname, kwargs=get_kwargs(s, True))))) + if create: + out.append((basename + ' create', lambda s, c: c.post(reverse(list_urlname, kwargs=get_kwargs(s, False)), data=c.get(reverse(detail_urlname, kwargs=get_kwargs(s, True))).data | create_data))) + if update: + out.append((basename + ' update', lambda s, c: c.put(reverse(detail_urlname, kwargs=get_kwargs(s, True)), data=c.get(reverse(detail_urlname, kwargs=get_kwargs(s, True))).data))) + if update_partial: + out.append((basename + ' partial_update', lambda s, c: c.patch(reverse(detail_urlname, kwargs=get_kwargs(s, True)), data=c.get(reverse(detail_urlname, kwargs=get_kwargs(s, True))).data))) + if destroy: + out.append((basename + ' destroy', lambda s, c: c.delete(reverse(detail_urlname, kwargs=get_kwargs(s, True))))) + if lock: + out.append((basename + ' lock', lambda s, c: c.post(reverse(basename + '-lock', kwargs=get_kwargs(s, True)), data={}))) + if unlock: + out.append((basename + ' unlock', lambda s, c: c.post(reverse(basename + '-unlock', kwargs=get_kwargs(s, True)), data={}))) + return out + + +def file_viewset_urls(basename, get_obj, get_base_kwargs=None, read=False, write=False): + get_base_kwargs = get_base_kwargs or (lambda s: {}) + def get_kwargs(s, detail): + obj = get_obj(s) + return get_base_kwargs(s) | ({'filename': obj.name} if detail == 'name' else {'pk': obj.pk} if detail else {}) + + out = viewset_urls(basename=basename, get_kwargs=get_kwargs, retrieve=read, update_partial=write, destroy=write) + if read: + out.append((basename + ' retrieve-by-name', lambda s, c: c.get(reverse(basename + '-retrieve-by-name', kwargs=get_kwargs(s, 'name'))))) + if write: + out.extend([ + (basename + ' create', lambda s, c: c.post( + path=reverse(basename + '-list', kwargs=get_kwargs(s, False)), + data={'name': 'image.png', 'file': ContentFile(name='image.png', content=create_png_file())}, + format='multipart', + )), + (basename + ' update', lambda s, c: c.put( + path=reverse(basename + '-detail', kwargs=get_kwargs(s, True)), + data={'name': 'image.png', 'file': ContentFile(name='image2.png', content=create_png_file())}, + format='multipart' + )), + ]) + return out + + +def project_viewset_urls(get_obj, read=False, write=False, create=False, list=False, destory=None, update=None): + destory = destory if destory is not None else write + update = update if update is not None else write + + out = [ + *viewset_urls('pentestproject', get_kwargs=lambda s, detail: {'pk': get_obj(s).pk} if detail else {}, list=list, retrieve=read, create=create, update=update, update_partial=update, destroy=destory), + *viewset_urls('section', get_kwargs=lambda s, detail: {'project_pk': get_obj(s).pk} | ({'section_id': get_obj(s).sections.first().section_id} 
if detail else {}), list=read, retrieve=read, update=write, update_partial=write, lock=write, unlock=write), + *viewset_urls('finding', get_kwargs=lambda s, detail: {'project_pk': get_obj(s).pk} | ({'finding_id': get_obj(s).findings.first().finding_id} if detail else {}), list=read, retrieve=read, create=write, destroy=write, update=write, update_partial=write, lock=write, unlock=write), + *viewset_urls('projectnotebookpage', get_kwargs=lambda s, detail: {'project_pk': get_obj(s).pk} | ({'note_id': get_obj(s).notes.first().note_id} if detail else {}), list=read, retrieve=read, create=write, destroy=write, update=write, update_partial=write, lock=write, unlock=write), + *file_viewset_urls('uploadedimage', get_base_kwargs=lambda s: {'project_pk': get_obj(s).pk}, get_obj=lambda s: get_obj(s).images.first(), read=read, write=write), + *file_viewset_urls('uploadedprojectfile', get_base_kwargs=lambda s: {'project_pk': get_obj(s).pk}, get_obj=lambda s: get_obj(s).files.first(), read=read, write=write), + ] + if read: + out.extend([ + ('pentestproject check', lambda s, c: c.get(reverse('pentestproject-check', kwargs={'pk': get_obj(s).pk}))), + ('pentestproject export', lambda s, c: c.post(reverse('pentestproject-export', kwargs={'pk': get_obj(s).pk}))), + ('pentestproject export-all', lambda s, c: c.post(reverse('pentestproject-export-all', kwargs={'pk': get_obj(s).pk}))), + ('pentestproject preview', lambda s, c: c.post(reverse('pentestproject-preview', kwargs={'pk': get_obj(s).pk}), data={})), + ('pentestproject generate', lambda s, c: c.post(reverse('pentestproject-generate', kwargs={'pk': get_obj(s).pk}), data={'password': 'pdf-password'})), + ]) + if write: + out.extend([ + ('pentestproject finding-fromtemplate', lambda s, c: c.post(reverse('finding-fromtemplate', kwargs={'project_pk': get_obj(s).pk}), data={'template': s.template.pk})), + ('projectnotebookpage sort', lambda s, c: c.post(reverse('projectnotebookpage-sort', kwargs={'project_pk': get_obj(s).pk}), data=[])), + ('pentestproject upload-image-or-file', lambda s, c: c.post(reverse('pentestproject-upload-image-or-file', kwargs={'pk': get_obj(s).pk}), data={'name': 'image.png', 'file': ContentFile(name='image.png', content=create_png_file())}, format='multipart')), + ('pentestproject upload-image-or-file', lambda s, c: c.post(reverse('pentestproject-upload-image-or-file', kwargs={'pk': get_obj(s).pk}), data={'name': 'test.pdf', 'file': ContentFile(name='text.pdf', content=b'text')}, format='multipart')), + ]) + if update: + out.extend([ + ('pentestproject customize-projecttype', lambda s, c: c.post(reverse('pentestproject-customize-projecttype', kwargs={'pk': get_obj(s).pk}), data={'project_type': get_obj(s).project_type.id})), + ]) + if create: + out.extend([ + ('pentestproject copy', lambda s, c: c.post(reverse('pentestproject-copy', kwargs={'pk': get_obj(s).pk}), data={})), + ('pentestproject import', lambda s, c: c.post(reverse('pentestproject-import'), data={'file': export_archive(get_obj(s))}, format='multipart')), + ]) + return out + + +def projecttype_viewset_urls(get_obj, read=False, write=False, create_global=False, list=False): + out = [ + *viewset_urls('projecttype', get_kwargs=lambda s, detail: {'pk': get_obj(s).pk} if detail else {}, list=list, retrieve=read, create=create_global, create_data={'scope': ProjectTypeScope.GLOBAL}, update=write, update_partial=write, destroy=write, lock=write, unlock=write), + *file_viewset_urls('uploadedasset', get_base_kwargs=lambda s: {'projecttype_pk': get_obj(s).pk}, get_obj=lambda s: 
get_obj(s).assets.first(), read=read, write=write), + ] + if read: + out.extend([ + ('projecttype preview', lambda s, c: c.post(reverse('projecttype-preview', kwargs={'pk': get_obj(s).pk}), data={'report_template': '', 'report_styles': '', 'report_preview_data': {}})), + ('projecttype export', lambda s, c: c.post(reverse('projecttype-export', kwargs={'pk': get_obj(s).pk}))), + ('projecttype copy private', lambda s, c: c.post(reverse('projecttype-copy', kwargs={'pk': get_obj(s).pk}), data={'scope': ProjectTypeScope.PRIVATE})), + ]) + if create_global: + out.extend([ + ('projecttype import', lambda s, c: c.post(reverse('projecttype-import'), data={'file': export_archive(get_obj(s))}, format='multipart')), + ('projecttype copy global', lambda s, c: c.post(reverse('projecttype-copy', kwargs={'pk': get_obj(s).pk}), data={'scope': ProjectTypeScope.GLOBAL})), + ]) + if list: + out.extend([ + ('projecttype get-predefined-finding-fields', lambda s, c: c.get(reverse('projecttype-get-predefined-finding-fields'))), + ]) + return out + + +def expect_result(urls, allowed_users=None): + all_users = {'public', 'guest', 'regular', 'template_editor', 'designer', 'user_manager', 'superuser'} + + for user in allowed_users or []: + yield from [(user, *u, True) for u in urls] + for user in all_users - set(allowed_users or []): + yield from [(user, *u, False) for u in urls] + + + +def public_urls(): + return [ + ('utils healthcheck', lambda s, c: c.get(reverse('utils-healthcheck'))), + ('utils settings', lambda s, c: c.get(reverse('utils-settings'))), + ] + + +def guest_urls(): + return [ + ('utils list', lambda s, c: c.get(reverse('utils-list'))), + + *viewset_urls('pentestuser', get_kwargs=lambda s, detail: {'pk': 'self'}, retrieve=True, update=True, update_partial=True), + *viewset_urls('pentestuser', get_kwargs=lambda s, detail: {}, list=True), + *viewset_urls('mfamethod', get_kwargs=lambda s, detail: {'pentestuser_pk': 'self'} | ({'pk': s.current_user.mfa_methods.get(is_primary=True).id if s.current_user else 'fake-uuid'} if detail else {}), list=True, retrieve=True, update=True, update_partial=True, destroy=True), + ('mfamethod register backup', lambda s, c: c.post(reverse('mfamethod-register-backup-begin', kwargs={'pentestuser_pk': 'self'}))), + ('mfamethod totp backup', lambda s, c: c.post(reverse('mfamethod-register-totp-begin', kwargs={'pentestuser_pk': 'self'}))), + ('mfamethod fido2 backup', lambda s, c: c.post(reverse('mfamethod-register-fido2-begin', kwargs={'pentestuser_pk': 'self'}))), + *viewset_urls('notification', get_kwargs=lambda s, detail: {'pentestuser_pk': 'self'} | ({'pk': s.current_user.notifications.first().id if s.current_user else 'fake-uuid'} if detail else {}), list=True, retrieve=True, update=True, update_partial=True), + *viewset_urls('userpublickey', get_kwargs=lambda s, detail: {'pentestuser_pk': 'self'} | ({'pk': s.current_user.public_keys.first().id if s.current_user else 'fake-uuid'} if detail else {}), list=True, retrieve=True, update=True, update_partial=True), + + *viewset_urls('usernotebookpage', get_kwargs=lambda s, detail: {'note_id': s.current_user.notes.first().note_id if s.current_user else 'fake-uuid'} if detail else {}, list=True, retrieve=True, create=True, update=True, update_partial=True, destroy=True, lock=True, unlock=True), + ('usernotebookpage sort', lambda s, c: c.post(reverse('usernotebookpage-sort'), data=[])), + *file_viewset_urls('uploadedusernotebookimage', get_obj=lambda s: s.current_user.images.first() if s.current_user else 
UploadedUserNotebookImage(name='nonexistent.png'), read=True, write=True), + + *viewset_urls('findingtemplate', get_kwargs=lambda s, detail: {'pk': s.template.pk} if detail else {}, list=True, retrieve=True), + ('findingtemplate fielddefinition', lambda s, c: c.get(reverse('findingtemplate-fielddefinition'))), + + ('projecttype create private', lambda s, c: c.post(reverse('projecttype-list'), data=c.get(reverse('projecttype-detail', kwargs={'pk': s.project_type.pk})).data | {'scope': ProjectTypeScope.PRIVATE})), + *projecttype_viewset_urls(get_obj=lambda s: s.project_type, list=True, read=True), + *projecttype_viewset_urls(get_obj=lambda s: s.project_type_customized, read=True, write=True), + *projecttype_viewset_urls(get_obj=lambda s: s.project_type_snapshot, read=True), + *projecttype_viewset_urls(get_obj=lambda s: ProjectType.objects.filter(linked_user=s.current_user or s.user_regular).first(), read=True, write=True), + + *project_viewset_urls(get_obj=lambda s: s.project, list=True, read=True, write=True, destory=False, update=False), + *project_viewset_urls(get_obj=lambda s: s.project_readonly, read=True), + + *viewset_urls('archivedproject', get_kwargs=lambda s, detail: {'pk': s.archived_project.pk} if detail else {}, list=True, retrieve=True), + *viewset_urls('archivedprojectkeypart', get_kwargs=lambda s, detail: {'archivedproject_pk': s.archived_project.pk} | ({'pk': s.archived_project.key_parts.first().pk} if detail else {}), list=True, retrieve=True), + ('archivedprojectkeypart public-key-encrypted-data', lambda s, c: c.get(reverse('archivedprojectkeypart-public-key-encrypted-data', kwargs={'archivedproject_pk': s.archived_project.pk, 'pk': getattr(s.archived_project.key_parts.filter(user=s.current_user).first(), 'pk', 'fake-uuid')}))), + ] + + +def regular_user_urls(): + return [ + *viewset_urls('pentestuser', get_kwargs=lambda s, detail: {'pk': s.user_other.pk} if detail else {}, retrieve=True), + + *project_viewset_urls(get_obj=lambda s: s.project, create=True, update=True, destory=True), + ('pentestproject readonly', lambda s, c: c.put(reverse('pentestproject-readonly', kwargs={'pk': s.project.pk}), data={'readonly': True})), + ('pentestproject readonly', lambda s, c: c.put(reverse('pentestproject-readonly', kwargs={'pk': s.project_readonly.pk}), data={'readonly': False})), + + ('pentestproject archive-check', lambda s, c: c.get(reverse('pentestproject-archive-check', kwargs={'pk': s.project_readonly.pk}))), + ('pentestproject archive', lambda s, c: c.post(reverse('pentestproject-archive', kwargs={'pk': s.project_readonly.pk}))), + ] + + +def template_editor_urls(): + return { + *viewset_urls('findingtemplate', get_kwargs=lambda s, detail: {'pk': s.template.pk} if detail else {}, create=True, update=True, update_partial=True, destroy=True, lock=True, unlock=True), + ('findingtemplate export', lambda s, c: c.post(reverse('findingtemplate-export', kwargs={'pk': s.template.pk}))), + ('findingtemplate import', lambda s, c: c.post(reverse('findingtemplate-import'), data={'file': export_archive(s.template)}, format='multipart')), + } + + +def designer_urls(): + return [ + *projecttype_viewset_urls(get_obj=lambda s: s.project_type, create_global=True, write=True), + ] + + +def user_manager_urls(): + return [ + *viewset_urls('pentestuser', get_kwargs=lambda s, detail: {'pk': s.user_other.pk} if detail else {}, create=True, create_data={'username': 'new', 'password': 'D40C4dEyH9Naam6!'}, update=True, update_partial=True), + ('pentestuser reset-password', lambda s, c: 
c.post(reverse('pentestuser-reset-password', kwargs={'pk': s.user_other.pk}), data={'password': 'D40C4dEyH9Naam6!'})), + *viewset_urls('mfamethod', get_kwargs=lambda s, detail: {'pentestuser_pk': s.user_other.pk} | ({'pk': s.user_other.mfa_methods.get(is_primary=True).pk} if detail else {}), list=True, retrieve=True, destroy=True), + *viewset_urls('authidentity', get_kwargs=lambda s, detail: {'pentestuser_pk': s.user_other.pk} | ({'pk': s.user_other.auth_identities.first().pk} if detail else {}), list=True, retrieve=True, create=True, create_data={'identifier': 'other.identifier'}, update=True, update_partial=True, destroy=True), + *viewset_urls('userpublickey', get_kwargs=lambda s, detail: {'pentestuser_pk': s.user_other.pk} | ({'pk': s.user_other.public_keys.first().pk} if detail else {}), list=True, retrieve=True), + ('utils-license', lambda s, c: c.get(reverse('utils-license'))), + ] + + +def superuser_urls(): + return [ + ('pentestuser enable-admin-permissions', lambda s, c: c.post(reverse('pentestuser-enable-admin-permissions'))), + ('pentestuser disable-admin-permissions', lambda s, c: c.post(reverse('pentestuser-disable-admin-permissions'))), + + *projecttype_viewset_urls(get_obj=lambda s: s.project_type_snapshot, write=True), + + # Not a project member + *project_viewset_urls(get_obj=lambda s: s.project_unauthorized, read=True, write=True), + *projecttype_viewset_urls(get_obj=lambda s: s.project_type_customized_unauthorized, read=True, write=True), + *projecttype_viewset_urls(get_obj=lambda s: s.project_type_private_unauthorized, read=True, write=True), + + ('pentestproject archive-check', lambda s, c: c.get(reverse('pentestproject-archive-check', kwargs={'pk': s.project_readonly_unauthorized.pk}))), + ('pentestproject archive', lambda s, c: c.post(reverse('pentestproject-archive', kwargs={'pk': s.project_readonly_unauthorized.pk}))), + *viewset_urls('archivedproject', get_kwargs=lambda s, detail: {'pk': s.archived_project_unauthorized.pk} if detail else {}, retrieve=True), + *viewset_urls('archivedprojectkeypart', get_kwargs=lambda s, detail: {'archivedproject_pk': s.archived_project_unauthorized.pk} | ({'pk': s.archived_project_unauthorized.key_parts.first().pk} if detail else {}), list=True, retrieve=True), + ('archivedprojectkeypart public-key-encrypted-data', lambda s, c: c.get(reverse('archivedprojectkeypart-public-key-encrypted-data', kwargs={'archivedproject_pk': s.archived_project_unauthorized.pk, 'pk': s.archived_project_unauthorized.key_parts.first().pk}))), + ] + + +def forbidden_urls(): + return [ + *project_viewset_urls(get_obj=lambda s: s.project_readonly, write=True), + ('mfamethod register backup', lambda s, c: c.post(reverse('mfamethod-register-backup-begin', kwargs={'pentestuser_pk': s.user_other.pk}))), + ('mfamethod totp backup', lambda s, c: c.post(reverse('mfamethod-register-totp-begin', kwargs={'pentestuser_pk': s.user_other.pk}))), + ('mfamethod fido2 backup', lambda s, c: c.post(reverse('mfamethod-register-fido2-begin', kwargs={'pentestuser_pk': s.user_other.pk}))), + *viewset_urls('userpublickey', get_kwargs=lambda s, detail: {'pentestuser_pk': s.user_other.pk} | ({'pk': s.user_other.public_keys.first().pk} if detail else {}), update=True, update_partial=True, destroy=True), + ('userpublickey register begin', lambda s, c: c.post(reverse('userpublickey-register-begin', kwargs={'pentestuser_pk': s.user_other.pk}), data={'name': 'new', 'public_key': s.user_other.public_keys.first().public_key})), + ] + + +def build_test_parameters(): + yield from 
expect_result( + urls=public_urls(), + allowed_users=['public', 'guest', 'regular', 'template_editor', 'designer', 'user_manager', 'superuser'] + ) + yield from expect_result( + urls=guest_urls(), + allowed_users=['guest', 'regular', 'template_editor', 'designer', 'user_manager', 'superuser'] + ) + yield from expect_result( + urls=regular_user_urls(), + allowed_users=['regular', 'template_editor', 'designer', 'user_manager', 'superuser'], + ) + yield from expect_result( + urls=template_editor_urls(), + allowed_users=['template_editor', 'superuser'], + ) + yield from expect_result( + urls=designer_urls(), + allowed_users=['designer', 'superuser'], + ) + yield from expect_result( + urls=user_manager_urls(), + allowed_users=['user_manager', 'superuser'], + ) + yield from expect_result( + urls=superuser_urls(), + allowed_users=['superuser'], + ) + yield from expect_result( + urls=forbidden_urls(), + allowed_users=[], + ) + + +@pytest.mark.django_db +class TestApiRequestsAndPermissions: + @pytest.fixture(autouse=True) + def setUp(self): + self.user_guest = create_user(username='guest', is_guest=True, mfa=True, public_key=True) + self.user_regular = create_user(username='regular', mfa=True, public_key=True) + self.user_template_editor = create_user(username='template_editor', is_template_editor=True, mfa=True, public_key=True) + self.user_designer = create_user(username='designer', is_designer=True, mfa=True, public_key=True) + self.user_user_manager = create_user(username='user_manager', is_user_manager=True, mfa=True, public_key=True) + self.user_superuser = create_user(username='superuser', is_superuser=True, is_staff=True, mfa=True, public_key=True) + self.user_superuser.admin_permissions_enabled = True + self.user_map = { + 'guest': self.user_guest, + 'regular': self.user_regular, + 'template_editor': self.user_template_editor, + 'designer': self.user_designer, + 'user_manager': self.user_user_manager, + 'superuser': self.user_superuser, + } + + self.user_other = create_user(username='other', mfa=True, public_key=True) + AuthIdentity.objects.create(user=self.user_other, provider='dummy', identifier='other.user@example.com') + NotificationSpec.objects.create(text='Test') + + self.current_user = None + + self.project = create_project(members=self.user_map.values()) + self.project_readonly = create_project(members=self.user_map.values(), readonly=True) + self.project_unauthorized = create_project(members=[self.user_other]) + self.project_readonly_unauthorized = create_project(members=[self.user_other], readonly=True) + + self.archived_project = create_archived_project(project=self.project_readonly) + self.archived_project_unauthorized = create_archived_project(project=self.project_unauthorized) + + self.project_type = create_project_type() + self.project_type_customized = create_project_type(source=SourceEnum.CUSTOMIZED, linked_project=self.project) + self.project_type_customized_unauthorized = create_project_type(source=SourceEnum.CUSTOMIZED, linked_project=self.project_unauthorized) + self.project_type_snapshot = create_project_type(source=SourceEnum.SNAPSHOT, linked_project=self.project) + self.project_type_private_unauthorized = create_project_type(source=SourceEnum.CREATED, linked_user=self.user_other) + # Personal project_types + for u in self.user_map.values(): + create_project_type(source=SourceEnum.CREATED, linked_user=u) + + self.template = create_template() + + # Override settings + with override_settings( + GUEST_USERS_CAN_IMPORT_PROJECTS=False, + 
GUEST_USERS_CAN_CREATE_PROJECTS=False, + GUEST_USERS_CAN_DELETE_PROJECTS=False, + GUEST_USERS_CAN_UPDATE_PROJECT_SETTINGS=False, + AUTHLIB_OAUTH_CLIENTS={ + 'dummy': { + 'label': 'Dummy', + } + } + ): + yield + + @pytest.mark.parametrize('user,name,perform_request,expected', sorted(build_test_parameters(), key=lambda t: (t[0], t[1], t[3]))) + def test_api_requests(self, user, name, perform_request, expected): + client = APIClient() + if user_obj := self.user_map.get(user): + client.force_authenticate(user_obj) + session = client.session + session['authentication_info'] = { + 'login_time': timezone.now().isoformat(), + 'reauth_time': timezone.now().isoformat(), + } + session.save() + self.current_user = user_obj + + res = perform_request(self, client) + info = res.data if not isinstance(res, (FileResponse, StreamingHttpResponse)) else res + if expected: + assert 200 <= res.status_code < 300, {'message': 'API request failed, but should have succeeded', 'info': info} + else: + assert 400 <= res.status_code < 500, {'message': 'API request succeeded, but should have failed', 'info': info} + diff --git a/api/src/reportcreator_api/tests/test_api2.py b/api/src/reportcreator_api/tests/test_api2.py new file mode 100644 index 0000000..0c15b03 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_api2.py @@ -0,0 +1,119 @@ +from uuid import uuid4 +from django.urls import reverse +import pytest +from rest_framework.test import APIClient +from reportcreator_api.pentests.models import ProjectType, ProjectTypeScope, SourceEnum + +from reportcreator_api.tests.mock import create_project, create_project_type, create_user, api_client + + +@pytest.mark.django_db +class TestProjectApi: + @pytest.fixture(autouse=True) + def setUp(self): + self.user = create_user() + self.project_type = create_project_type() + + self.client = api_client(self.user) + + def test_create_project(self): + p = self.client.post(reverse('pentestproject-list'), data={ + 'name': 'New Project', + 'project_type': self.project_type.id, + 'members': [], + }).json() + + # User added as member + assert len(p['members']) == 1 + assert p['members'][0]['id'] == str(self.user.id) + + # ProjectType copied on create + assert p['project_type'] != str(self.project_type.id) + assert self.client.get(reverse('projecttype-detail', kwargs={'pk': p['project_type']})).json()['source'] == SourceEnum.SNAPSHOT + + def test_copy_project(self): + project = create_project(project_type=self.project_type, members=[self.user]) + cp = self.client.post(reverse('pentestproject-copy', kwargs={'pk': project.id})).json() + assert cp['id'] != str(project.id) + assert cp['project_type'] != str(project.project_type.id) + pt = ProjectType.objects.get(id=cp['project_type']) + assert pt.source == SourceEnum.SNAPSHOT + assert str(pt.linked_project.id) == cp['id'] + + def test_change_design(self): + project = create_project(members=[self.user]) + project.project_type.linked_project = project + project.project_type.save() + + # ProjectType not changed + u = self.client.patch(reverse('pentestproject-detail', kwargs={'pk': project.id}), data={ + 'project_type': project.project_type.id, + }).json() + assert u['project_type'] == str(project.project_type.id) + + # ProjectType changed + p = self.client.patch(reverse('pentestproject-detail', kwargs={'pk': project.id}), data={ + 'project_type': self.project_type.id + }).json() + + assert p['project_type'] not in [str(project.project_type.id), str(self.project_type.id)] + pt = ProjectType.objects.get(id=p['project_type']) + assert pt.source 
== SourceEnum.SNAPSHOT + assert pt.linked_project == project + + def test_change_imported_members(self): + project = create_project(members=[self.user], imported_members=[{ + 'id': uuid4(), + 'additional_field': 'test', + 'roles': [], + }]) + res = self.client.patch(reverse('pentestproject-detail', kwargs={'pk': project.id}), data={ + 'imported_members': [{'id': project.imported_members[0]['id'], 'roles': ['pentester']}] + }) + assert res.status_code == 200 + project.refresh_from_db() + assert project.imported_members[0]['roles'] == ['pentester'] + assert project.imported_members[0]['additional_field'] == 'test' + + +@pytest.mark.django_db +class TestProjectTypeApi: + @pytest.mark.parametrize('user,scope,expected', [ + ('designer', ProjectTypeScope.GLOBAL, True), + ('designer', ProjectTypeScope.PRIVATE, True), + ('regular', ProjectTypeScope.GLOBAL, False), + ('regular', ProjectTypeScope.PRIVATE, True), + ]) + def test_create_design(self, user, scope, expected): + user = create_user(is_designer=user == 'designer') + res = api_client(user).post(reverse('projecttype-list'), data={'name': 'Test', 'scope': scope}) + assert (res.status_code == 201) == expected + if expected: + assert res.data['scope'] == scope + pt = ProjectType.objects.get(id=res.data['id']) + assert pt.scope == scope + assert pt.linked_project is None + assert pt.linked_user == (user if scope == 'private' else None) + + @pytest.mark.parametrize('user,project_type,scope,expected', [ + ('designer', 'global', ProjectTypeScope.GLOBAL, True), + ('designer', 'global', ProjectTypeScope.PRIVATE, True), + ('designer', 'private', ProjectTypeScope.GLOBAL, True), + ('designer', 'private', ProjectTypeScope.PRIVATE, True), + + ('regular', 'global', ProjectTypeScope.GLOBAL, False), + ('regular', 'global', ProjectTypeScope.PRIVATE, True), + ('regular', 'private', ProjectTypeScope.GLOBAL, False), + ('regular', 'private', ProjectTypeScope.PRIVATE, True), + ]) + def test_copy_design(self, user, project_type, scope, expected): + user = create_user(is_designer=user == 'designer') + project_type = create_project_type(linked_user=user if project_type == 'private' else None) + res = api_client(user).post(reverse('projecttype-copy', kwargs={'pk': project_type.id}), data={'scope': scope}) + assert (res.status_code == 201) == expected + if expected: + assert res.data['scope'] == scope + pt = ProjectType.objects.get(id=res.data['id']) + assert pt.scope == scope + assert pt.linked_project is None + assert pt.linked_user == (user if scope == ProjectTypeScope.PRIVATE else None) diff --git a/api/src/reportcreator_api/tests/test_auth.py b/api/src/reportcreator_api/tests/test_auth.py new file mode 100644 index 0000000..708e382 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_auth.py @@ -0,0 +1,224 @@ +import pytest +import pyotp +from datetime import timedelta +from django.conf import settings +from django.urls import reverse +from rest_framework.test import APIClient + +from reportcreator_api.utils.utils import omit_keys +from reportcreator_api.tests.mock import create_project, create_user, mock_time, api_client +from reportcreator_api.users.models import MFAMethod, MFAMethodType + + + +@pytest.mark.django_db +class TestLogin: + @pytest.fixture(autouse=True) + def setUp(self): + self.password = 'Password1!' 
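+ # Fixture note: 'user' has no MFA configured, while 'user_mfa' gets both backup codes
+ # and TOTP below, so logging in as 'user_mfa' is expected to return status 'mfa-required'.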
+ self.user = create_user(username='user', password=self.password) + self.user_mfa = create_user(username='user_mfa', password=self.password) + self.mfa_backup = MFAMethod.objects.create_backup(user=self.user_mfa) + self.mfa_totp = MFAMethod.objects.create_totp(user=self.user_mfa) + + self.client = api_client() + + def assert_api_access(self, expected): + res = self.client.get(reverse('pentestuser-self')) + if expected: + assert res.status_code == 200 + else: + assert res.status_code in [401, 403] + + def assert_login(self, user, password=None, success=True, status='success'): + res = self.client.post(reverse('auth-login'), data={ + 'username': user.username, + 'password': password or self.password, + }) + if success: + assert res.status_code == 200 + assert res.data['status'] == status + else: + assert res.status_code == 400 + self.assert_api_access(False) + return res + + def assert_mfa_login(self, mfa_method, data=None, user=None, success=True): + self.assert_login(user=user or self.user_mfa, status='mfa-required') + if mfa_method.method_type == MFAMethodType.BACKUP: + res = self.client.post(reverse('auth-login-code'), data={ + 'id': str(mfa_method.id), + 'code': mfa_method.data['backup_codes'][0], + } | (data or {})) + elif mfa_method.method_type == MFAMethodType.TOTP: + res = self.client.post(reverse('auth-login-code'), data=data or { + 'id': str(mfa_method.id), + 'code': pyotp.TOTP(**mfa_method.data).now(), + }) + elif mfa_method.method_type == MFAMethodType.FIDO2: + pass + + if success: + assert res.status_code == 200 + self.assert_api_access(True) + else: + assert res.status_code == 400 + self.assert_api_access(False) + return res + + def test_login(self): + self.assert_login(user=self.user) + self.assert_api_access(True) + + def test_logout(self): + self.assert_login(self.user) + res = self.client.post(reverse('auth-logout')) + assert res.status_code == 204 + self.assert_api_access(False) + + def test_login_failure(self): + self.assert_login(user=self.user, password='invalid_password', success=False) + + def test_login_mfa(self): + self.assert_login(user=self.user_mfa, status='mfa-required') + self.assert_api_access(False) + + def test_login_timeout(self): + with mock_time(before=settings.MFA_LOGIN_TIMEOUT * 2): + self.assert_login(user=self.user_mfa, status='mfa-required') + res = self.client.post(reverse('auth-login-code'), data={ + 'id': str(self.mfa_totp.id), + 'code': pyotp.TOTP(**self.mfa_totp.data).now(), + }) + assert res.status_code == 400 + self.assert_api_access(False) + + def test_login_backup_code(self): + code = self.mfa_backup.data['backup_codes'][0] + res = self.assert_mfa_login(self.mfa_backup) + # Backup code invalidated + self.mfa_backup.refresh_from_db() + assert code not in self.mfa_backup.data['backup_codes'] + + def test_login_backup_code_failure(self): + self.assert_mfa_login(self.mfa_backup, data={'code': 'invalid'}, success=False) + + def test_login_totp(self): + self.assert_mfa_login(self.mfa_totp) + + def test_login_totp_failure(self): + self.assert_mfa_login(self.mfa_totp, data={'code': 'invalid'}, success=False) + + def test_login_mfa_method_of_other_user(self): + other_user = create_user() + other_mfa = MFAMethod.objects.create_totp(user=other_user) + self.assert_mfa_login(user=self.user_mfa, mfa_method=other_mfa, success=False) + + +@pytest.mark.django_db +class TestMfaMethodRegistration: + @pytest.fixture(autouse=True) + def setUp(self): + self.password = 'Password1!' 
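+ # The login below is posted twice: the second request serves as a re-authentication,
+ # which the MFA registration endpoints require (see test_reauthentication_timeout).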
+ self.user = create_user(username='user', password=self.password) + self.client = api_client() + self.client.post(reverse('auth-login'), data={'username': self.user.username, 'password': self.password}) + self.client.post(reverse('auth-login'), data={'username': self.user.username, 'password': self.password}) + + def test_register_backup_codes(self): + res_begin = self.client.post(reverse('mfamethod-register-backup-begin', kwargs={'pentestuser_pk': 'self'})) + assert res_begin.status_code == 200 + res_complete = self.client.post(reverse('mfamethod-register-backup-complete', kwargs={'pentestuser_pk': 'self'})) + assert res_complete.status_code == 201 + assert self.user.mfa_methods.count() == 1 + mfa = self.user.mfa_methods.first() + assert mfa.method_type == MFAMethodType.BACKUP + assert mfa.data['backup_codes'] == res_begin.data['backup_codes'] + + def test_register_totp(self): + res_begin = self.client.post(reverse('mfamethod-register-totp-begin', kwargs={'pentestuser_pk': 'self'})) + assert res_begin.status_code == 200 + data_begin = omit_keys(res_begin.data, ['qrcode']) + res_complete = self.client.post(reverse('mfamethod-register-totp-complete', kwargs={'pentestuser_pk': 'self'}), data={ + 'code': pyotp.TOTP(**data_begin).now(), + }) + assert res_complete.status_code == 201 + assert self.user.mfa_methods.count() == 1 + mfa = self.user.mfa_methods.first() + assert mfa.method_type == MFAMethodType.TOTP + assert mfa.data == data_begin + + def test_reauthentication_timeout(self): + # Simple login + self.client.logout() + self.client.post(reverse('auth-login'), data={'username': self.user.username, 'password': self.password}) + res1 = self.client.post(reverse('mfamethod-register-backup-begin', kwargs={'pentestuser_pk': 'self'})) + assert res1.status_code == 403 + + # Re-authentication + self.client.post(reverse('auth-login'), data={'username': self.user.username, 'password': self.password}) + res2 = self.client.post(reverse('mfamethod-register-backup-begin', kwargs={'pentestuser_pk': 'self'})) + assert res2.status_code == 200 + + # Re-authentication timed out + with mock_time(after=settings.SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT * 2): + res3 = self.client.post(reverse('mfamethod-register-backup-begin', kwargs={'pentestuser_pk': 'self'})) + assert res3.status_code == 403 + + +@pytest.mark.django_db +class TestEnableAdminPermissions: + @pytest.fixture(autouse=True) + def setUp(self): + self.project_not_member = create_project() + + self.password = 'Password1!' 
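+ # project_not_member (created above without this user as a member) serves as the probe:
+ # even a superuser should only be able to read it after enabling admin permissions.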
+ self.user = create_user(is_superuser=True, password=self.password) + self.client = api_client() + self.client.post(reverse('auth-login'), data={'username': self.user.username, 'password': self.password}) + + def has_admin_access(self): + return self.client.get(reverse('pentestproject-detail', kwargs={'pk': self.project_not_member.pk})).status_code == 200 + + def test_enable_admin_permissions(self): + assert not self.has_admin_access() + + # Try without re-auth + res_privesc_failed = self.client.post(reverse('pentestuser-enable-admin-permissions')) + assert res_privesc_failed.status_code == 403 + assert res_privesc_failed.json()['code'] == 'reauth-required' + + # Re-authenticate + old_session_id = self.client.session.session_key + res_reauth = self.client.post(reverse('auth-login'), data={'username': self.user.username, 'password': self.password}) + assert res_reauth.status_code == 200 + assert self.client.session.session_key != old_session_id + + # Enable admin permissions + res_privesc_success = self.client.post(reverse('pentestuser-enable-admin-permissions')) + assert res_privesc_success.status_code == 200 + user_data = res_privesc_success.json() + assert user_data['id'] == str(self.user.id) + assert user_data['is_superuser'] + assert 'admin' in user_data['scope'] + assert self.client.session['admin_permissions_enabled'] + + assert self.has_admin_access() + + def test_disable_admin_permissions(self): + session = self.client.session + session['admin_permissions_enabled'] = True + session.save() + + assert self.has_admin_access() + + res = self.client.post(reverse('pentestuser-disable-admin-permissions')) + assert res.status_code == 200 + user_data = res.json() + assert user_data['id'] == str(self.user.id) + assert user_data['is_superuser'] + assert 'admin' not in user_data['scope'] + assert not self.client.session.get('admin_permissions_enabled') + + assert not self.has_admin_access() + diff --git a/api/src/reportcreator_api/tests/test_checks.py b/api/src/reportcreator_api/tests/test_checks.py new file mode 100644 index 0000000..d09201f --- /dev/null +++ b/api/src/reportcreator_api/tests/test_checks.py @@ -0,0 +1,165 @@ +import pytest +from reportcreator_api.pentests.models import ReviewStatus +from reportcreator_api.tests.mock import create_finding, create_project, create_project_type +from reportcreator_api.utils.error_messages import MessageLevel, MessageLocationInfo, MessageLocationType, ErrorMessage + + +pytestmark = pytest.mark.django_db + + +def assertContainsCheckResults(actual, expected): + for e in expected: + for a in actual: + if e.message == a.message and e.location.type == a.location.type and e.location.id == a.location.id and e.location.path == a.location.path: + break + else: + assert False, f'{e} not in check results' + + +def assertNotContainsCheckResults(actual, expected): + for e in expected: + for a in actual: + if e.message == a.message and e.location.type == a.location.type and e.location.id == a.location.id and e.location.path == a.location.path: + assert False, f'{e} in check results' + + +def set_all_required(definiton, required): + if definiton.get('type'): + definiton['required'] = required + if definiton['type'] == 'object': + set_all_required(definiton['properties'], required) + elif definiton['type'] == 'list': + set_all_required(definiton['items'], required) + elif isinstance(definiton, dict): + for k, d in definiton.items(): + set_all_required(d, required) + + +def test_check_todo(): + todo_fields = { + 'field_string': 'TODO: content', + 
'field_markdown': 'Multiline markdown \nwith ![image](To-do) in markdown\n\n* item1\n* TODO: more items', + 'field_list': ['item1', 'ToDo: more items'], + 'field_object': {'nested1': 'nested todo in object'}, + 'field_list_objects': [{'nested1': 'TODO'}], + } + todo_field_paths = ['field_string', 'field_markdown', 'field_list[1]', 'field_object.nested1', 'field_list_objects[0].nested1'] + project = create_project(report_data=todo_fields) + finding = create_finding(project=project, data=todo_fields) + + assertContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Unresolved TODO', location=MessageLocationInfo(type=MessageLocationType.SECTION, id='other', path=p)) + for p in todo_field_paths + ] + [ + ErrorMessage(level=MessageLevel.WARNING, message='Unresolved TODO', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=finding.finding_id, path=p)) + for p in todo_field_paths + ]) + + +def test_check_empty(): + empty_fields = { + 'field_string': '', + 'field_markdown': '', + 'field_int': None, + 'field_date': None, + 'field_enum': None, + 'field_user': None, + 'field_list': [], + 'field_object': {'nested1': ''}, + 'field_list_objects': [{'nested1': ''}], + } + empty_field_paths = [ + 'field_string', 'field_markdown', 'field_int', 'field_date', 'field_enum', 'field_user', + 'field_list', 'field_object.nested1', 'field_list_objects[0].nested1' + ] + project_type = create_project_type() + set_all_required(project_type.report_fields, True) + set_all_required(project_type.finding_fields, True) + project_type.save() + project = create_project(project_type=project_type, report_data=empty_fields) + finding = create_finding(project=project, data=empty_fields) + + assertContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Empty field', location=MessageLocationInfo(type=MessageLocationType.SECTION, id='other', path=p)) + for p in empty_field_paths + ] + [ + ErrorMessage(level=MessageLevel.WARNING, message='Empty field', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=finding.finding_id, path=p)) + for p in empty_field_paths + ]) + + +def test_check_empty_not_required(): + empty_fields = { + 'field_string': '', + 'field_markdown': '', + 'field_int': None, + 'field_date': None, + 'field_enum': None, + 'field_user': None, + 'field_list': [], + 'field_object': {'nested1': ''}, + 'field_list_objects': [{'nested1': ''}], + } + empty_field_paths = [ + 'field_string', 'field_markdown', 'field_int', 'field_date', 'field_enum', 'field_user', + 'field_list', 'field_object.nested1', 'field_list_objects[0].nested1' + ] + project_type = create_project_type() + set_all_required(project_type.report_fields, False) + set_all_required(project_type.finding_fields, False) + project_type.save() + project = create_project(project_type=project_type, report_data=empty_fields) + finding = create_finding(project=project, data=empty_fields) + + assertNotContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Empty field', location=MessageLocationInfo(type=MessageLocationType.SECTION, id='other', path=p)) + for p in empty_field_paths + ] + [ + ErrorMessage(level=MessageLevel.WARNING, message='Empty field', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=finding.finding_id, path=p)) + for p in empty_field_paths + ]) + + +def test_invalid_cvss(): + project = create_project() + finding_valid1 = create_finding(project=project, data={'cvss': 
'CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H'}) + finding_valid2 = create_finding(project=project, data={'cvss': 'AV:N/AC:L/Au:N/C:C/I:C/A:C'}) + finding_valid3 = create_finding(project=project, data={'cvss': 'n/a'}) + finding_invalid1 = create_finding(project=project, data={'cvss': 'CVSS:3.1/asdf'}) + finding_invalid2 = create_finding(project=project, data={'cvss': 'invalid CVSS'}) + + assertContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Invalid CVSS vector', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=f.finding_id, path='cvss')) + for f in [finding_invalid1, finding_invalid2] + ]) + assertNotContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Invalid CVSS vector', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=f.finding_id, path='cvss')) + for f in [finding_valid1, finding_valid2, finding_valid3] + ]) + + +def test_review_status(): + project = create_project() + finding_valid = create_finding(project=project, status=ReviewStatus.FINISHED) + finding_invalid1 = create_finding(project=project, status=ReviewStatus.IN_PROGRESS) + finding_invalid2 = create_finding(project=project, status=ReviewStatus.READY_FOR_REVIEW) + finding_invalid3 = create_finding(project=project, status=ReviewStatus.NEEDS_IMPROVEMENT) + + section_valid = project.sections.first() + section_valid.status = ReviewStatus.FINISHED + section_valid.save() + section_invalid = project.sections.exclude(id=section_valid.id).first() + section_invalid.status = ReviewStatus.IN_PROGRESS + section_invalid.save() + + assertContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Status is not "finished"', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=f.finding_id)) + for f in [finding_invalid1, finding_invalid2, finding_invalid3] + ] + [ + ErrorMessage(level=MessageLevel.WARNING, message='Status is not "finished"', location=MessageLocationInfo(type=MessageLocationType.SECTION, id=s.section_id)) + for s in [section_invalid] + ]) + assertNotContainsCheckResults(project.perform_checks(), [ + ErrorMessage(level=MessageLevel.WARNING, message='Status is not "finished"', location=MessageLocationInfo(type=MessageLocationType.FINDING, id=finding_valid.finding_id)), + ErrorMessage(level=MessageLevel.WARNING, message='Status is not "finished"', location=MessageLocationInfo(type=MessageLocationType.SECTION, id=section_valid.section_id)), + ]) \ No newline at end of file diff --git a/api/src/reportcreator_api/tests/test_crypto.py b/api/src/reportcreator_api/tests/test_crypto.py new file mode 100644 index 0000000..202c963 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_crypto.py @@ -0,0 +1,580 @@ +import base64 +import io +import json +import random +import sys +import zipfile +import pytest +from unittest import mock +from uuid import UUID +from contextlib import contextmanager +from django.db import connection +from django.forms import model_to_dict +from django.test import override_settings +from django.urls import reverse +from django.http import StreamingHttpResponse +from django.conf import settings +from django.core import serializers +from django.core import management +from django.core.files.storage import storages, FileSystemStorage + +from reportcreator_api.archive import crypto +from reportcreator_api.archive.crypto import pgp +from reportcreator_api.notifications.models import NotificationSpec +from 
reportcreator_api.pentests.models import FindingTemplate, PentestFinding, PentestProject, ProjectType, \ + UploadedAsset, UploadedImage, UploadedProjectFile, \ + ArchivedProject, ArchivedProjectKeyPart, UserPublicKey +from reportcreator_api.management.commands import encryptdata +from reportcreator_api.tests.mock import api_client, create_archived_project, create_project, create_public_key, create_template, create_user, create_project_type +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils.storages import EncryptedFileSystemStorage + + +def assert_db_field_encrypted(query, expected): + with connection.cursor() as cursor: + cursor.execute(*query.query.as_sql(compiler=query.query.compiler, connection=connection)) + row = cursor.fetchone() + assert row[0].tobytes().startswith(crypto.MAGIC) == expected + + +def assert_storage_file_encrypted(file, expected): + with file.open(mode='rb').file.file.fileobj as f: + f.seek(0) + assert f.read().startswith(crypto.MAGIC) == expected + + +class TestSymmetricEncryptionTests: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.key = crypto.EncryptionKey(id='test-key', key=b'a' * (256 // 8)) + self.nonce = b'n' * 16 + self.plaintext = b'This is a plaintext content which will be encrypted in unit tests. ' + (b'a' * 100) + b' lorem impsum long text' + + with override_settings(ENCRYPTION_PLAINTEXT_FALLBACK=True): + yield + + def encrypt(self, pt): + enc = io.BytesIO() + with crypto.open(fileobj=enc, mode='w', key=self.key, nonce=self.nonce) as c: + c.write(pt) + return enc.getvalue() + + @contextmanager + def open_decrypt(self, ct, **kwargs): + with crypto.open(fileobj=io.BytesIO(ct), mode='r', keys={self.key.id: self.key}, **kwargs) as c: + yield c + + def decrypt(self, ct, **kwargs): + with self.open_decrypt(ct, **kwargs) as c: + return c.read() + + def modify_metadata(self, enc, m): + ct_start_index = enc.index(b'\x00') + metadata = json.loads(enc[len(crypto.MAGIC):ct_start_index].decode()) + metadata |= m + return crypto.MAGIC + json.dumps(metadata).encode() + enc[ct_start_index:] + + def test_encryption_decryption(self): + enc = self.encrypt(self.plaintext) + assert enc.startswith(crypto.MAGIC) + dec = self.decrypt(enc) + assert dec == self.plaintext + + def test_encryption_chunked(self): + enc = io.BytesIO() + with crypto.open(enc, 'w', key=self.key, nonce=self.nonce) as c: + for b in self.plaintext: + c.write(bytes([b])) + assert enc.getvalue() == self.encrypt(self.plaintext) + assert self.decrypt(enc.getvalue()) == self.plaintext + + def test_decryptions_chunked(self): + dec = b'' + with self.open_decrypt(self.encrypt(self.plaintext)) as c: + while b := c.read(1): + dec += b + assert dec == self.plaintext + + def test_read_plaintext(self): + dec = self.decrypt(self.plaintext) + assert dec == self.plaintext + + def test_write_plaintext(self): + enc = io.BytesIO() + with crypto.open(enc, mode='w', key=None) as c: + c.write(self.plaintext) + assert enc.getvalue() == self.plaintext + + def test_verify_payload(self): + enc = bytearray(self.encrypt(self.plaintext)) + enc[100] = (enc[100] + 10) & 0xFF # Modify ciphertext + with pytest.raises(crypto.CryptoError): + self.decrypt(enc) + + def test_verify_header(self): + enc = self.encrypt(self.plaintext) + modified = self.modify_metadata(enc, {'added_field': 'new'}) + with pytest.raises(crypto.CryptoError): + self.decrypt(modified) + + def test_verify_key(self): + enc = self.encrypt(self.plaintext) + modified = self.modify_metadata(enc, {'nonce': 
base64.b64encode(b'x' * 16).decode()}) + with pytest.raises(crypto.CryptoError): + self.decrypt(modified) + + def test_missing_metadata(self): + enc = self.encrypt(self.plaintext)[:10] + with pytest.raises(crypto.CryptoError): + self.decrypt(enc) + + def test_corrupted_magic(self): + enc = self.encrypt(self.plaintext) + enc = b'\x00\x00' + enc[2:] + assert self.decrypt(enc) == enc + + def test_partial_magic(self): + enc = crypto.MAGIC[2:] + assert self.decrypt(enc) == enc + + def test_missing_tag(self): + enc = self.encrypt(self.plaintext) + enc = enc[:enc.index(b'\x00') + 3] + with pytest.raises(crypto.CryptoError): + self.decrypt(enc) + + def test_encrypt_empty(self): + enc = self.encrypt(b'') + assert enc.startswith(crypto.MAGIC) + assert self.decrypt(enc) == b'' + + def test_decryption_seek(self): + enc = self.encrypt(self.plaintext) + with self.open_decrypt(enc) as c: + c.seek(20, io.SEEK_SET) + assert c.tell() == 20 + assert c.read(5) == self.plaintext[20:25] + assert c.tell() == 25 + + assert c.seek(0, io.SEEK_CUR) == 25 + assert c.tell() == 25 + assert c.read(5) == self.plaintext[25:30] + + c.seek(0, io.SEEK_END) + assert c.tell() == len(self.plaintext) + assert c.read(5) == b'' + + c.seek(c.tell() - 5, io.SEEK_SET) + assert c.tell() == len(self.plaintext) - 5 + assert c.read(5) == self.plaintext[-5:] + + c.seek(0, io.SEEK_SET) + assert c.tell() == 0 + assert c.read(5) == self.plaintext[:5] + + def test_encrypt_revoked_key(self): + self.key.revoked = True + with pytest.raises(crypto.CryptoError): + self.encrypt(self.plaintext) + + def test_decrypt_revoked_key(self): + enc = self.encrypt(self.plaintext) + self.key.revoked = True + with pytest.raises(crypto.CryptoError): + self.decrypt(enc) + + def test_plaintext_fallback_disabled_encryption(self): + with pytest.raises(crypto.CryptoError): + enc = io.BytesIO() + with crypto.open(fileobj=enc, mode='w', key=None, plaintext_fallback=False) as c: + c.write(self.plaintext) + + def test_plaintext_fallback_disabled_decryption(self): + with pytest.raises(crypto.CryptoError): + self.decrypt(self.plaintext, plaintext_fallback=False) + + +class TestEncryptedStorage: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.storage_plain = FileSystemStorage(location='/tmp/test/') + self.storage_crypto = EncryptedFileSystemStorage(location='/tmp/test/') + self.plaintext = b'This is a test file content which should be encrypted' + + with override_settings( + ENCRYPTION_KEYS={'test-key': crypto.EncryptionKey(id='test-key', key=b'a' * 32)}, + DEFAULT_ENCRYPTION_KEY_ID='test-key', + ENCRYPTION_PLAINTEXT_FALLBACK=True, + ): + yield + + def test_save(self): + filename = self.storage_crypto.save('test.txt', io.BytesIO(self.plaintext)) + assert str(UUID(filename.replace('/', ''))) != 'test.txt' + enc = self.storage_plain.open(filename, mode='rb').read() + assert enc.startswith(crypto.MAGIC) + dec = self.storage_crypto.open(filename, mode='rb').read() + assert dec == self.plaintext + + def test_open(self): + with self.storage_crypto.open('test.txt', mode='wb') as f: + filename = f.name + f.write(self.plaintext) + + enc = self.storage_plain.open(filename, mode='rb').read() + assert enc.startswith(crypto.MAGIC) + dec = self.storage_crypto.open(filename, mode='rb').read() + assert dec == self.plaintext + + def test_size(self): + with self.storage_crypto.open('test.txt', mode='wb') as f: + filename = f.name + f.write(self.plaintext) + + assert self.storage_crypto.size(filename) == len(self.storage_crypto.open(filename, mode='rb').read()) + + 
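+
+# Usage sketch (not called by any test): it only combines calls already exercised
+# by TestSymmetricEncryptionTests above, so it illustrates the intended round trip
+# through the encrypted container format without asserting anything new.
+def _example_symmetric_roundtrip(plaintext: bytes) -> bytes:
+    # fixed key/nonce mirror the fixtures above; not a recommendation for real use
+    key = crypto.EncryptionKey(id='example-key', key=b'k' * 32)
+    encrypted = io.BytesIO()
+    with crypto.open(fileobj=encrypted, mode='w', key=key, nonce=b'n' * 16) as writer:
+        writer.write(plaintext)
+    # Decryption selects the key by the key id stored in the header metadata
+    with crypto.open(fileobj=io.BytesIO(encrypted.getvalue()), mode='r', keys={key.id: key}) as reader:
+        return reader.read()
+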
+@pytest.mark.django_db +class TestEncryptedDbField: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.template = create_template() + project = create_project() + self.finding = project.findings.first() + self.user = create_user() + + with override_settings( + ENCRYPTION_KEYS={'test-key': crypto.EncryptionKey(id='test-key', key=b'a' * 32)}, + DEFAULT_ENCRYPTION_KEY_ID='test-key', + ENCRYPTION_PLAINTEXT_FALLBACK=True + ): + yield + + def test_transparent_encryption(self): + # Test transparent encryption/decryption. No encrypted data should be returned to caller + data_dict = {'test': 'content'} + self.finding.custom_fields = data_dict + self.finding.template_id = self.template.id + self.finding.save() + self.user.set_password('pwd') + self.user.save() + + assert_db_field_encrypted(PentestFinding.objects.filter(id=self.finding.id).values('custom_fields'), True) + + f = PentestFinding.objects.filter(id=self.finding.id).get() + assert f.custom_fields == data_dict + assert f.template_id == self.template.id + assert self.user.check_password('pwd') + + def test_data_stored_encrypted(self): + self.finding.custom_fields = {'test': 'content'} + self.finding.template_id = self.template.id + self.finding.save() + + assert_db_field_encrypted(PentestFinding.objects.filter(id=self.finding.id).values('custom_fields'), True) + + @override_settings(DEFAULT_ENCRYPTION_KEY_ID=None) + def test_db_encryption_disabled(self): + self.finding.custom_fields = {'test': 'content'} + self.finding.template_id = self.template.id + self.finding.save() + + assert_db_field_encrypted(PentestFinding.objects.filter(id=self.finding.id).values('custom_fields'), False) + + +@pytest.mark.django_db +class TestEncryptDataCommand: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + with override_settings( + ENCRYPTION_KEYS={}, + DEFAULT_ENCRYPTION_KEY_ID=None, + ENCRYPTION_PLAINTEXT_FALLBACK=True, + STORAGES=settings.STORAGES | { + 'uploaded_images': {'BACKEND': 'reportcreator_api.utils.storages.EncryptedInMemoryStorage', 'OPTIONS': {'location': '/tmp/uploadedimages'}}, + 'uploaded_assets': {'BACKEND': 'reportcreator_api.utils.storages.EncryptedInMemoryStorage', 'OPTIONS': {'location': '/tmp/uploadedassets'}}, + 'uploaded_files': {'BACKEND': 'reportcreator_api.utils.storages.EncryptedInMemoryStorage', 'OPTIONS': {'location': '/tmp/uploadedfiles'}}, + } + ): + UploadedImage.file.field.storage = storages['uploaded_images'] + UploadedAsset.file.field.storage = storages['uploaded_assets'] + UploadedProjectFile.file.field.storage = storages['uploaded_files'] + self.project = create_project() + yield + + @override_settings( + ENCRYPTION_KEYS={'test-key': crypto.EncryptionKey(id='test-key', key=b'a' * 32)}, + DEFAULT_ENCRYPTION_KEY_ID='test-key', + ) + def test_command(self): + management.call_command(encryptdata.Command()) + + p = PentestProject.objects.filter(id=self.project.id) + assert_db_field_encrypted(p.values('custom_fields'), True) + for i in p.get().images.all(): + assert_db_field_encrypted(UploadedImage.objects.filter(id=i.id).values('name'), True) + assert_storage_file_encrypted(i.file, True) + for f in p.get().files.all(): + assert_db_field_encrypted(UploadedProjectFile.objects.filter(id=f.id).values('name'), True) + assert_storage_file_encrypted(f.file, True) + + pt = ProjectType.objects.filter(id=self.project.project_type.id) + assert_db_field_encrypted(pt.values('report_template'), True) + assert_db_field_encrypted(pt.values('report_styles'), True) + 
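+        # encryptdata is also expected to re-encrypt the design's preview data and
+        # its uploaded assets with the configured default key; both are verified below.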
assert_db_field_encrypted(pt.values('report_preview_data'), True) + for a in pt.get().assets.all(): + assert_db_field_encrypted(UploadedAsset.objects.filter(id=a.id).values('name'), True) + assert_storage_file_encrypted(a.file, True) + + +@pytest.mark.django_db +class TestBackup: + @pytest.fixture(autouse=True) + def setUp(self): + self.backup_key = 'a' * 30 + with override_settings( + BACKUP_KEY=self.backup_key, + ENCRYPTION_KEYS={'test-key': crypto.EncryptionKey(id='test-key', key=b'a' * 32)}, + DEFAULT_ENCRYPTION_KEY_ID='test-key', + ENCRYPTION_PLAINTEXT_FALLBACK=False, + ): + self.user_system = create_user(is_system_user=True) + + # Data to be backed up + self.user = create_user(mfa=True) + self.project = create_project() + self.project_type = create_project_type() + self.template = create_template() + self.archived_project = create_archived_project() + self.notification = NotificationSpec.objects.create(title='test', text='test') + + yield + + def assert_backup_obj(self, backup, obj): + data = next(filter(lambda e: e.object.pk == obj.pk, backup)) + assert data.object == obj + assert model_to_dict(data.object) == model_to_dict(obj) + return data + + def assert_backup_file(self, backup, z, dir, obj, stored_encrypted=False): + self.assert_backup_obj(backup, obj) + bak_img = z.read(f'{dir}/{obj.file.name}') + assert bak_img.startswith(crypto.MAGIC) == stored_encrypted + assert bak_img == obj.file.open('rb').read() + + def assert_backup(self, content): + with zipfile.ZipFile(io.BytesIO(content), mode='r') as z: + # Test that data is not encrypted in backup + assert crypto.MAGIC not in z.read('backup.jsonl') + backup = list(serializers.deserialize('jsonl', z.read('backup.jsonl'))) + + # Test if objects are present in backup + self.assert_backup_obj(backup, self.project) + self.assert_backup_obj(backup, self.project.findings.first()) + self.assert_backup_obj(backup, self.project.sections.first()) + self.assert_backup_obj(backup, self.project.notes.first()) + self.assert_backup_obj(backup, self.project_type) + self.assert_backup_obj(backup, self.template) + self.assert_backup_obj(backup, self.user.notes.first()) + self.assert_backup_obj(backup, self.user.mfa_methods.first()) + self.assert_backup_obj(backup, self.archived_project) + self.assert_backup_obj(backup, self.notification) + self.assert_backup_obj(backup, self.user.notifications.first()) + + self.assert_backup_file(backup, z, 'uploadedimages', self.project.images.all().first()) + self.assert_backup_file(backup, z, 'uploadedimages', self.user.images.all().first()) + self.assert_backup_file(backup, z, 'uploadedassets', self.project_type.assets.all().first()) + self.assert_backup_file(backup, z, 'uploadedfiles', self.project.files.first()) + self.assert_backup_file(backup, z, 'archivedfiles', self.archived_project, stored_encrypted=True) + + def backup_request(self, user=None, backup_key=None, aes_key=None): + if not user: + user = self.user_system + if not backup_key: + backup_key = self.backup_key + return api_client(user).post(reverse('utils-backup'), data={'key': backup_key, 'aes_key': base64.b64encode(aes_key).decode() if aes_key else None}) + + def test_backup(self): + # Create backup + res = self.backup_request() + assert res.status_code == 200 + assert isinstance(res, StreamingHttpResponse) + z = b''.join(res.streaming_content) + self.assert_backup(z) + + def test_backup_restore(self): + # Create backup + backup = b''.join(self.backup_request().streaming_content) + + # Delete data + PentestProject.objects.all().delete() + 
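+        # The remaining backed-up models are wiped as well, so the restore below has
+        # to rebuild everything purely from backup.jsonl (fed to loaddata via stdin).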
ArchivedProject.objects.all().delete() + ProjectType.objects.all().delete() + FindingTemplate.objects.all().delete() + PentestUser.objects.all().delete() + + # Restore backup + with zipfile.ZipFile(io.BytesIO(backup), 'r') as z: + with mock.patch.object(sys, 'stdin', io.StringIO(z.read('backup.jsonl').decode())): + management.call_command('loaddata', '-', format='jsonl') + + # Validate restored data + self.project.refresh_from_db() + self.project_type.refresh_from_db() + self.template.refresh_from_db() + self.user.refresh_from_db() + self.notification.refresh_from_db() + + def test_backup_permissions(self): + user_regular = create_user() + assert self.backup_request(user=user_regular).status_code == 403 + superuser = create_user(is_superuser=True) + assert self.backup_request(user=superuser).status_code == 403 + + def test_invalid_backup_key(self): + assert self.backup_request(backup_key=b'invalid' * 10).status_code == 400 + + def test_backup_encryption(self): + aes_key = b'a' * 32 + res = self.backup_request(aes_key=aes_key) + assert res.status_code == 200 + assert isinstance(res, StreamingHttpResponse) + enc = b''.join(res.streaming_content) + assert enc.startswith(crypto.MAGIC) + with crypto.open(fileobj=io.BytesIO(enc), key=crypto.EncryptionKey(id=None, key=aes_key)) as c: + assert c.metadata['key_id'] is None + z = c.read() + self.assert_backup(z) + + +@pytest.mark.django_db +class TestProjectArchivingEncryption: + @pytest.fixture(autouse=True) + def setUp(self): + with pgp.create_gpg() as self.gpg: + yield + + def create_user_with_private_key(self, **kwargs): + user = create_user(public_key=False, **kwargs) + master_key = self.gpg.gen_key(self.gpg.gen_key_input( + key_type='EdDSA', + key_curve='ed25519', + no_protection=True, + subkey_type='ECDH', + subkey_curve='nistp384', + )) + public_key_pem = self.gpg.export_keys(master_key.fingerprint) + create_public_key(user=user, public_key=public_key_pem) + return user + + def test_register_public_key(self): + user = create_user() + client = api_client(user) + + master_key = self.gpg.gen_key(self.gpg.gen_key_input( + key_type='EdDSA', + key_curve='ed25519', + no_protection=True, + subkey_type='ECDH', + subkey_curve='nistp384', + )) + public_key_pem = self.gpg.export_keys(master_key.fingerprint) + res = client.post(reverse('userpublickey-register-begin', kwargs={'pentestuser_pk': 'self'}), data={ + 'name': 'Test Public Key', + 'public_key': public_key_pem, + }) + assert res.status_code == 200 + assert res.data['status'] == 'verify-key' + + verification_decrypted = self.gpg.decrypt(res.data['verification']) + res = client.post(reverse('userpublickey-register-complete', kwargs={'pentestuser_pk': 'self'}), data={ + 'verification': verification_decrypted.data.decode(), + }) + assert res.status_code == 201 + user_public_key = UserPublicKey.objects.get(id=res.data['id']) + assert user_public_key.public_key == public_key_pem + + def test_delete_public_key(self): + user = create_user(public_key=True) + archive = create_archived_project(project=create_project(members=[user], readonly=True)) + client = api_client(user) + + # public key used in archive + res1 = client.delete(reverse('userpublickey-detail', kwargs={'pentestuser_pk': 'self', 'pk': user.public_keys.first().id})) + assert res1.status_code == 400 + + # public key not used in archived + archive.delete() + res2 = client.delete(reverse('userpublickey-detail', kwargs={'pentestuser_pk': 'self', 'pk': user.public_keys.first().id})) + assert res2.status_code == 204 + + 
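+    # Archiving splits the project key into per-user key parts: one for each project
+    # member and each global archiver that has a registered public key. The
+    # ARCHIVING_THRESHOLD setting defines how many of those parts must be decrypted
+    # before the project is restored; the tests below cover the validation rules and
+    # a full archive/dearchive round trip.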
@pytest.mark.parametrize(['expected', 'threshold', 'num_users_with_key', 'num_users_without_key'], [ + (False, 1, 0, 2), # no users with key + (False, 2, 1, 2), # too few users with key + (False, 5, 3, 0), # threshold too high + (True, 2, 3, 1), + ]) + def test_archiving_validation(self, expected, threshold, num_users_with_key, num_users_without_key): + with override_settings(ARCHIVING_THRESHOLD=threshold): + users = [create_user(public_key=True) for _ in range(num_users_with_key)] + \ + [create_user(public_key=False) for _ in range(num_users_without_key)] + project = create_project(members=users, readonly=True) + res = api_client(users[0]).post(reverse('pentestproject-archive', kwargs={'pk': project.pk})) + assert (res.status_code == 201) == expected + + @override_settings(ARCHIVING_THRESHOLD=2) + def test_archiving_dearchiving(self): + user_regular = self.create_user_with_private_key() + user_archiver1 = self.create_user_with_private_key(is_global_archiver=True) + user_archiver2 = self.create_user_with_private_key(is_global_archiver=True) + user_without_key = create_user() + project = create_project(members=[user_regular, user_archiver1, user_without_key], readonly=True) + + client = api_client(user_regular) + res = client.post(reverse('pentestproject-archive', kwargs={'pk': project.pk})) + assert res.status_code == 201 + + archive = ArchivedProject.objects.get(id=res.data['id']) + assert archive.threshold == 2 + assert archive.name == project.name + assert archive.key_parts.count() == 3 + assert set(archive.key_parts.values_list('user_id', flat=True)) == {user_regular.id, user_archiver1.id, user_archiver2.id} + assert not PentestProject.objects.filter(id=project.id).exists() + + # Decrypt first keypart + keypart1 = archive.key_parts.get(user=user_regular) + keypart_kwargs1 = {'archivedproject_pk': archive.id, 'pk': keypart1.id} + res_k1 = client.get(reverse('archivedprojectkeypart-public-key-encrypted-data', kwargs=keypart_kwargs1)) + assert res_k1.status_code == 200 + res_d1 = client.post(reverse('archivedprojectkeypart-decrypt', kwargs=keypart_kwargs1), data={ + 'data': self.gpg.decrypt(res_k1.data[0]['encrypted_data']).data.decode() + }) + assert res_d1.status_code == 200 + assert res_d1.data['status'] == 'key-part-decrypted' + keypart1.refresh_from_db() + assert keypart1.is_decrypted + + # Decrypt second keypart => restores whole project + client2 = api_client(user_archiver2) + keypart2 = archive.key_parts.get(user=user_archiver2) + keypart_kwargs2 = {'archivedproject_pk': archive.id, 'pk': keypart2.id} + res_k2 = client2.get(reverse('archivedprojectkeypart-public-key-encrypted-data', kwargs=keypart_kwargs2)) + assert res_k2.status_code == 200 + res_d2 = client2.post(reverse('archivedprojectkeypart-decrypt', kwargs=keypart_kwargs2), data={ + 'data': self.gpg.decrypt(res_k2.data[0]['encrypted_data']).data.decode() + }) + assert res_d2.status_code == 200 + assert res_d2.data['status'] == 'project-restored' + assert not ArchivedProject.objects.filter(id=archive.id).exists() + + project_restored = PentestProject.objects.get(id=res_d2.data['project_id']) + assert project_restored.name == project.name + + def test_decrypt_wrong_key(self): + user = create_user(public_key=True) + archive = create_archived_project(project=create_project(members=[user], readonly=True)) + + res = api_client(user).post(reverse('archivedprojectkeypart-decrypt', kwargs={'archivedproject_pk': archive.id, 'pk': archive.key_parts.first().id}), { + 'data': base64.b64encode(random.randbytes(32)).decode(), + }) + 
assert res.status_code == 400 diff --git a/api/src/reportcreator_api/tests/test_customfields.py b/api/src/reportcreator_api/tests/test_customfields.py new file mode 100644 index 0000000..4ded551 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_customfields.py @@ -0,0 +1,486 @@ +import itertools +import pytest +from django.test import override_settings +from django.core.exceptions import ValidationError +from reportcreator_api.pentests.customfields.mixins import CustomFieldsMixin +from reportcreator_api.pentests.customfields.predefined_fields import FINDING_FIELDS_CORE, FINDING_FIELDS_PREDEFINED, REPORT_FIELDS_CORE, finding_fields_default + +from reportcreator_api.pentests.customfields.types import FieldDataType, field_definition_to_dict, parse_field_definition +from reportcreator_api.pentests.customfields.validators import FieldDefinitionValidator, FieldValuesValidator +from reportcreator_api.pentests.customfields.utils import check_definitions_compatible +from reportcreator_api.pentests.models import FindingTemplate +from reportcreator_api.tests.mock import create_finding, create_project_type, create_project, create_template, create_user +from reportcreator_api.utils.utils import copy_keys + + + +@pytest.mark.parametrize('valid,definition', [ + (True, {}), + (False, {'f': {}}), + (False, {'f': {'type': 'string'}}), + # Test field id + (True, {'field1': {'type': 'string', 'label': 'Field 1', 'default': None}}), + (True, {'fieldNumber_one': {'type': 'string', 'label': 'Field 1', 'default': None}}), + (False, {'field 1': {'type': 'string', 'label': 'Field 1', 'default': None}}), + (False, {'field.one': {'type': 'string', 'label': 'Field 1', 'default': None}}), + (False, {'1st_field': {'type': 'string', 'label': 'Field 1', 'default': None}}), + # Test data types + (True, { + 'field_string': {'type': 'string', 'label': 'String Field', 'default': 'test'}, + 'field_markdown': {'type': 'markdown', 'label': 'Markdown Field', 'default': '# test\nmarkdown'}, + 'field_cvss': {'type': 'cvss', 'label': 'CVSS Field', 'default': 'n/a'}, + 'field_date': {'type': 'date', 'label': 'Date Field', 'default': '2022-01-01'}, + 'field_int': {'type': 'number', 'label': 'Number Field', 'default': 10}, + 'field_bool': {'type': 'boolean', 'label': 'Boolean Field', 'default': False}, + 'field_enum': {'type': 'enum', 'label': 'Enum Field', 'choices': [{'value': 'enum1', 'label': 'Enum Value 1'}, {'value': 'enum2', 'label': 'Enum Value 2'}], 'default': 'enum2'}, + 'field_combobox': {'type': 'combobox', 'label': 'Combobox Field', 'suggestions': ['value 1', 'value 2'], 'default': 'value1'}, + 'field_user': {'type': 'user', 'label': 'User Field'}, + 'field_object': {'type': 'object', 'label': 'Nested Object', 'properties': {'nested1': {'type': 'string', 'label': 'Nested Field'}}}, + 'field_list': {'type': 'list', 'label': 'List Field', 'items': {'type': 'string'}}, + 'field_list_objects': {'type': 'list', 'label': 'List of nested objects', 'items': {'type': 'object', 'properties': {'nested1': {'type': 'string', 'label': 'Nested object field', 'default': None}}}}, + }), + (False, {'f': {'type': 'unknown', 'label': 'Unknown'}}), + (False, {'f': {'type': 'date', 'label': 'Date', 'default': 'not a date'}}), + (False, {'f': {'type': 'number', 'label': 'Number', 'default': 'not an int'}}), + (False, {'f': {'type': 'enum', 'label': 'Enum Filed'}}), + (False, {'f': {'type': 'enum', 'label': 'Enum Field', 'choices': []}}), + (False, {'f': {'type': 'enum', 'label': 'Enum Field', 'choices': [{'value': 'v1'}]}}), + (False, 
{'f': {'type': 'enum', 'label': 'Enum Field', 'choices': [{'value': None}]}}), + (False, {'f': {'type': 'enum', 'label': 'Enum Field', 'choices': [{'label': 'Name only'}]}}), + (False, {'f': {'type': 'combobox'}}), + (False, {'f': {'type': 'combobox', 'suggestions': [None]}}), + (False, {'f': {'type': 'object', 'label': 'Object Field'}}), + (False, {'f': {'type': 'object', 'label': 'Object Field', 'properties': {'adsf': {}}}}), + (False, {'f': {'type': 'list', 'label': 'List Field'}}), + (False, {'f': {'type': 'list', 'label': 'List Field', 'items': {}}}), +]) +def test_definition_formats(valid, definition): + res_valid = True + try: + FieldDefinitionValidator()(definition) + except ValidationError as ex: + res_valid = False + assert res_valid == valid + + +@pytest.mark.parametrize('valid,definition,value', [ + (True, { + 'field_string': {'type': 'string', 'label': 'String Field', 'default': 'test'}, + 'field_string2': {'type': 'string', 'label': 'String Field', 'default': None}, + 'field_markdown': {'type': 'markdown', 'label': 'Markdown Field', 'default': '# test\nmarkdown'}, + 'field_cvss': {'type': 'cvss', 'label': 'CVSS Field', 'default': 'n/a'}, + 'field_date': {'type': 'date', 'label': 'Date Field', 'default': '2022-01-01'}, + 'field_int': {'type': 'number', 'label': 'Number Field', 'default': 10}, + 'field_bool': {'type': 'boolean', 'label': 'Boolean Field', 'default': False}, + 'field_enum': {'type': 'enum', 'label': 'Enum Field', 'choices': [{'value': 'enum1', 'label': 'Enum Value 1'}, {'value': 'enum2', 'label': 'Enum Value 2'}], 'default': 'enum2'}, + 'field_combobox': {'type': 'combobox', 'lable': 'Combobox Field', 'suggestions': ['a', 'b']}, + 'field_object': {'type': 'object', 'label': 'Nested Object', 'properties': {'nested1': {'type': 'string', 'label': 'Nested Field'}}}, + 'field_list': {'type': 'list', 'label': 'List Field', 'items': {'type': 'string'}}, + 'field_list_objects': {'type': 'list', 'label': 'List of nested objects', 'items': {'type': 'object', 'properties': {'nested1': {'type': 'string', 'label': 'Nested object field', 'default': None}}}}, + }, { + 'field_string': 'This is a string', + 'field_string2': None, + 'field_markdown': 'Some **markdown**\n* String\n*List', + 'field_cvss': 'CVSS:3.1/AV:N/AC:H/PR:N/UI:R/S:C/C:H/I:H/A:H', + 'field_date': '2022-01-01', + 'field_int': 17, + 'field_bool': True, + 'field_enum': 'enum2', + 'field_combobox': 'value2', + 'field_object': {'nested1': 'val'}, + 'field_list': ['test'], + 'field_list_objects': [{'nested1': 'test'}, {'nested1': 'values'}], + 'field_additional': 'test', + }), + (False, {'f': {'type': 'string'}}, {'f': {}}), + (False, {'f': {'type': 'string'}}, {}), + (False, {'f': {'type': 'list', 'items': {'type': 'object', 'properties': {'f': {'type': 'string'}}}}}, {'f': [{'f': 'v'}, {'f': 1}]}), + (True, {'f': {'type': 'list', 'items': {'type': 'object', 'properties': {'f': {'type': 'string'}}}}}, {'f': [{'f': 'v'}, {'f': None}]}), + (True, {'f': {'type': 'list', 'items': {'type': 'string'}}}, {'f': []}), + (False, {'f': {'type': 'list', 'items': {'type': 'string'}}}, {'f': None}), + (True, {'f': {'type': 'combobox', 'suggestions': ['a', 'b']}}, {'f': 'other'}), + # (False, {'f': {'type': 'user'}}, {'f': str(uuid4())}), +]) +def test_field_values(valid, definition, value): + res_valid = True + try: + FieldValuesValidator(parse_field_definition(definition))(value) + except (ValidationError, ValueError): + res_valid = False + assert res_valid == valid + + +@pytest.mark.django_db +def test_user_field_value(): + 
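+    # User fields store the referenced user's UUID as a string. Unlike the
+    # parametrized cases above, validating the value needs database access (the
+    # commented-out uuid4 case hints that unknown ids should be rejected), hence
+    # a separate django_db test.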
user = create_user() + FieldValuesValidator(parse_field_definition({'field_user': {'type': 'user', 'label': 'User Field'}}))({'field_user': str(user.id)}) + + +class CustomFieldsTestModel(CustomFieldsMixin): + def __init__(self, field_definition, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self._field_definition = parse_field_definition(field_definition) + + @property + def field_definition(self): + return self._field_definition + + +@pytest.mark.parametrize('definition,old_value,new_value', [ + ({'a': {'type': 'string'}}, {'a': 'old'}, {'a': 'new'}), + ({'a': {'type': 'string'}}, {'a': 'text'}, {'a': None}), + ({'a': {'type': 'number'}}, {'a': 10}, {'a': None}), + ({'a': {'type': 'enum', 'choices': [{'value': 'a'}]}}, {'a': 'a'}, {'a': None}), + ({'a': {'type': 'list', 'items': {'type': 'enum', 'choices': [{'value': 'a'}]}}}, {'a': ['a', 'a']}, {'a': ['a', None]}), + ({'a': {'type': 'list', 'items': {'type': 'string'}}}, {'a': ['text']}, {'a': []}), +]) +def test_update_field_values(definition, old_value, new_value): + m = CustomFieldsTestModel(field_definition=definition, custom_fields=old_value) + m.update_data(new_value) + assert m.data == new_value + + +@pytest.mark.parametrize('compatible,a,b', [ + (True, {'a': {'type': 'string'}}, {'b': {'type': 'string'}}), + (True, {'a': {'type': 'string'}}, {'a': {'type': 'string'}}), + (True, {'a': {'type': 'string', 'label': 'left', 'default': 'left', 'required': False}}, {'a': {'type': 'string', 'label': 'right', 'defualt': 'right', 'required': True}}), + (True, {'a': {'type': 'string'}}, {'a': {'type': 'string'}, 'b': {'type': 'string'}}), + (True, {'a': {'type': 'string'}, 'b': {'type': 'string'}}, {'a': {'type': 'string'}}), + (False, {'a': {'type': 'string'}}, {'a': {'type': 'list', 'items': {'type': 'string'}}}), + (False, {'a': {'type': 'string'}}, {'a': {'type': 'markdown'}}), + (True, {'a': {'type': 'list', 'items': {'type': 'string'}}}, {'a': {'type': 'list', 'items': {'type': 'string'}}}), + (False, {'a': {'type': 'list', 'items': {'type': 'string'}}}, {'a': {'type': 'list', 'items': {'type': 'number'}}}), + (True, {'a': {'type': 'object', 'properties': {'a': {'type': 'string'}}}}, {'a': {'type': 'object', 'properties': {'a': {'type': 'string'}}}}), + (True, {'a': {'type': 'object', 'properties': {'a': {'type': 'string'}}}}, {'a': {'type': 'object', 'properties': {'a': {'type': 'boolean'}}}}), + (True, {'a': {'type': 'enum', 'choices': [{'value': 'a'}]}}, {'a': {'type': 'enum', 'choices': [{'value': 'a'}]}}), + (True, {'a': {'type': 'enum', 'choices': [{'value': 'a'}]}}, {'a': {'type': 'enum', 'choices': [{'value': 'a'}, {'value': 'b'}]}}), + (False, {'a': {'type': 'enum', 'choices': [{'value': 'a'}, {'value': 'b'}]}}, {'a': {'type': 'enum', 'choices': [{'value': 'a'}]}}), + (True, {'a': {'type': 'combobox', 'suggestions': ['a']}}, {'a': {'type': 'combobox', 'choices': ['b']}}), +]) +def test_definitions_compatible(compatible, a, b): + assert check_definitions_compatible(parse_field_definition(a), parse_field_definition(b))[0] == compatible + + +@pytest.mark.django_db +class TestUpdateFieldDefinition: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.project_type = create_project_type() + self.project = create_project(project_type=self.project_type) + self.finding = create_finding(project=self.project) + + self.project_other = create_project() + self.finding_other = create_finding(project=self.project_other) + + def refresh_data(self): + self.project_type.refresh_from_db() + 
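+        # Projects and findings are reloaded too: definition changes on a design
+        # propagate to existing projects using it, and the tests assert on that
+        # propagated state rather than on stale in-memory objects.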
self.project.refresh_from_db() + self.finding.refresh_from_db() + self.project_other.refresh_from_db() + self.finding_other.refresh_from_db() + + def test_add_report_field(self): + default_value = 'new' + self.project_type.report_fields |= { + 'field_new': {'type': 'string', 'label': 'New field', 'default': default_value}, + } + self.project_type.save() + self.refresh_data() + + section = self.project.sections.get(section_id='other') + assert 'field_new' in section.section_definition['fields'] + assert self.project_type.report_preview_data['report']['field_new'] == default_value + + # New field added to projects + assert 'field_new' in section.data + assert section.data['field_new'] == default_value + + assert 'field_new' not in self.project_other.data_all + + def test_add_finding_field(self): + default_value = 'new' + self.project_type.finding_fields |= { + 'field_new': {'type': 'string', 'label': 'New field', 'default': default_value}, + } + self.project_type.save() + self.refresh_data() + + assert self.project_type.finding_field_order[-1] == 'field_new' + assert self.project_type.report_preview_data['findings'][0]['field_new'] == default_value + + # New field added to projects + assert 'field_new' in self.finding.data + assert self.finding.data['field_new'] == default_value + + assert 'field_new' not in self.finding_other.data + + def test_delete_report_field(self): + old_value = self.project.data['field_string'] + del self.project_type.report_fields['field_string'] + self.project_type.save() + self.refresh_data() + + assert 'field_string' not in set(itertools.chain(*map(lambda s: s['fields'], self.project_type.report_sections))) + assert 'field_string' in self.project_type.report_preview_data['report'] + + # Field removed from project (but data is kept in DB) + assert 'field_string' not in self.project.data + assert 'field_string' in self.project.data_all + assert self.project.data_all['field_string'] == old_value + + assert 'field_string' in self.project_other.data + + def test_delete_finding_field(self): + old_value = self.finding.data['field_string'] + del self.project_type.finding_fields['field_string'] + self.project_type.save() + self.refresh_data() + + assert 'field_string' not in self.project_type.finding_field_order + assert 'field_string' in self.project_type.report_preview_data['findings'][0] + + # Field remove from project (but data is kept in DB) + assert 'field_string' not in self.finding.data + assert 'field_string' in self.finding.data_all + assert self.finding.data_all['field_string'] == old_value + + assert 'field_string' in self.finding_other.data + + def test_change_type_report_field(self): + self.project_type.report_fields |= { + 'field_string': {'type': 'object', 'label': 'Changed type', 'properties': {'nested': {'type': 'string', 'label': 'Nested field', 'default': 'default'}}}, + } + self.project_type.save() + self.refresh_data() + + assert isinstance(self.project_type.report_preview_data['report']['field_string'], dict) + section = self.project.sections.get(section_id='other') + assert section.data['field_string'] == {'nested': 'default'} + + def test_change_type_finding_field(self): + self.project_type.finding_fields |= { + 'field_string': {'type': 'object', 'label': 'Changed type', 'properties': {'nested': {'type': 'string', 'label': 'Nested field', 'default': 'default'}}}, + } + self.project_type.save() + self.refresh_data() + + assert isinstance(self.project_type.report_preview_data['findings'][0]['field_string'], dict) + assert 
self.finding.data['field_string'] == {'nested': 'default'} + + def test_change_default_report_field(self): + default_val = 'changed' + self.project_type.report_fields['field_string']['default'] = default_val + self.project_type.save() + self.refresh_data() + + assert self.project.data['field_string'] != default_val + + project_new = create_project(project_type=self.project_type) + assert project_new.data['field_string'] == default_val + + def test_change_default_finding_field(self): + default_val = 'changed' + self.project_type.finding_fields['field_string']['default'] = default_val + self.project_type.save() + self.refresh_data() + + assert self.finding.data['field_string'] != default_val + + finding_new = create_finding(project=self.project) + assert finding_new.data['field_string'] == default_val + + def test_restore_data_report_field(self): + old_value = self.project.data['field_string'] + old_definition = self.project_type.report_fields['field_string'] + + # Delete field from definition + del self.project_type.report_fields['field_string'] + self.project_type.save() + self.refresh_data() + assert 'field_string' not in self.project.data + assert self.project.data_all['field_string'] == old_value + + # Restore field in definition + self.project_type.report_fields |= {'field_string': old_definition | {'labal': 'Changed name', 'default': 'other'}} + self.project_type.save() + self.refresh_data() + assert self.project.data['field_string'] == old_value + + def test_restore_data_finding_field(self): + old_value = self.finding.data['field_string'] + old_definition = self.project_type.finding_fields['field_string'] + + # Delete field from definition + del self.project_type.finding_fields['field_string'] + self.project_type.save() + self.refresh_data() + assert 'field_string' not in self.finding.data + assert self.finding.data_all['field_string'] == old_value + + # Restore field in definition + self.project_type.finding_fields |= {'field_string': old_definition | {'labal': 'Changed name', 'default': 'other'}} + self.project_type.save() + self.refresh_data() + assert self.finding.data['field_string'] == old_value + + def test_change_project_type_report_fields(self): + old_value = self.project.data['field_string'] + project_type_new = create_project_type(report_fields=field_definition_to_dict(REPORT_FIELDS_CORE) | { + 'field_new': {'type': 'string', 'default': 'default', 'label': 'New field'} + }) + self.project.project_type = project_type_new + self.project.save() + self.refresh_data() + + assert 'field_string' not in self.project.data + assert self.project.data_all['field_string'] == old_value + assert self.project.data['field_new'] == 'default' + + def test_change_project_type_finding_fields(self): + old_value = self.project.data['field_string'] + project_type_new = create_project_type(finding_fields=field_definition_to_dict(FINDING_FIELDS_CORE) | { + 'field_new': {'type': 'string', 'default': 'default', 'label': 'New field'} + }) + self.project.project_type = project_type_new + self.project.save() + self.refresh_data() + + assert 'field_string' not in self.finding.data + assert self.finding.data_all['field_string'], old_value + assert self.finding.data['field_new'], 'default' + + +@pytest.mark.django_db +class TestPredefinedFields: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.project_type = create_project_type( + finding_fields=field_definition_to_dict(FINDING_FIELDS_CORE | copy_keys(FINDING_FIELDS_PREDEFINED, 'description'))) + project = 
create_project(project_type=self.project_type) + self.finding = create_finding(project=project) + + def test_change_structure(self): + with pytest.raises(ValidationError): + self.project_type.finding_fields |= { + 'description': {'type': 'list', 'label': 'Changed', 'items': {'type': 'string', 'default': 'changed'}} + } + self.project_type.clean_fields() + + def test_add_conflicting_field(self): + with pytest.raises(ValidationError): + self.project_type.finding_fields |= { + 'recommendation': {'type': 'list', 'label': 'Changed', 'items': {'type': 'string', 'default': 'changed'}} + } + self.project_type.clean_fields() + + +@pytest.mark.django_db +class TestTemplateFieldDefinition: + @pytest.fixture(autouse=True) + def setUp(self): + self.project_type1 = create_project_type( + finding_fields=field_definition_to_dict(FINDING_FIELDS_CORE | { + 'field1': {'type': 'string', 'default': 'default', 'label': 'Field 1'}, + 'field_conflict': {'type': 'string', 'default': 'default', 'label': 'Conflicting field type'}, + }) + ) + self.project_type2 = create_project_type( + finding_fields=field_definition_to_dict(FINDING_FIELDS_CORE | { + 'field2': {'type': 'string', 'default': 'default', 'label': 'Field 2'}, + 'field_conflict': {'type': 'list', 'label': 'conflicting field type', 'items': {'type': 'string', 'default': 'default'}} + }) + ) + self.project_type_hidden = create_project_type( + finding_fields=field_definition_to_dict(FINDING_FIELDS_CORE | { + 'field_hidden': {'type': 'string', 'default': 'default', 'label': 'Field of hidden ProjectType'}, + }) + ) + project_hidden = create_project(project_type=self.project_type_hidden) + self.project_type_hidden.linked_project = project_hidden + self.project_type_hidden.save() + + self.template = create_template(data={'title': 'test', 'field1': 'f1 value', 'field2': 'f2 value'}) + + with override_settings(CACHES={'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}}): + yield + + def test_get_template_field_definition(self): + assert \ + set(FindingTemplate.field_definition.keys()) == \ + set(FINDING_FIELDS_CORE.keys()) | set(FINDING_FIELDS_PREDEFINED.keys()) | {'field1', 'field2', 'field_conflict'} + assert FindingTemplate.field_definition['field_conflict'].type == FieldDataType.STRING + + def test_delete_field_definition(self): + old_value = self.template.data['field1'] + del self.project_type1.finding_fields['field1'] + self.project_type1.save() + self.template.refresh_from_db() + + assert 'field1' not in FindingTemplate.field_definition + assert self.template.data_all['field1'] == old_value + + def test_change_field_type(self): + self.project_type1.finding_fields |= {'field1': {'type': 'list', 'label': 'changed field type', 'items': {'type': 'string', 'default': 'default'}}} + self.project_type1.save() + self.template.refresh_from_db() + + assert FindingTemplate.field_definition['field1'].type == FieldDataType.LIST + assert self.template.data['field1'] == [] + + +@pytest.mark.django_db +class TestReportSectionDefinition: + @pytest.fixture(autouse=True) + def setUp(self): + field_definition = {'type': 'string', 'default': 'default', 'label': 'Field label'} + self.project_type = create_project_type( + report_fields=field_definition_to_dict(REPORT_FIELDS_CORE) | { + 'field1': field_definition, + 'field2': field_definition, + 'field3': field_definition, + }, + report_sections=[ + {'id': 'section1', 'fields': ['field1'], 'label': 'Section 1'}, + {'id': 'section2', 'fields': ['field2'], 'label': ['Section 2']}, + ] + ) + self.project = 
create_project(project_type=self.project_type) + + def test_fields_in_no_section_put_it_other_section(self): + assert set(self.project.sections.values_list('section_id', flat=True)) == {'section1', 'section2', 'other'} + assert set(self.project.sections.get(section_id='other').section_fields) == set(REPORT_FIELDS_CORE.keys()) | {'field3'} + + def test_add_section(self): + self.project_type.report_fields |= {'field_new': {'type': 'string', 'default': 'default', 'label': 'new field'}} + self.project_type.report_sections += [{'id': 'section_new', 'fields': ['field_new']}] + self.project_type.save() + self.project.refresh_from_db() + + section_new = self.project.sections.get(section_id='section_new') + assert section_new.section_fields == ['field_new'] + assert section_new.data['field_new'] == 'default' + + def test_delete_section(self): + old_value = self.project.sections.get(section_id='section1').data['field1'] + section1 = next(filter(lambda s: s['id'] == 'section1', self.project_type.report_sections)) + section2 = next(filter(lambda s: s['id'] == 'section2', self.project_type.report_sections)) + section2['fields'].extend(section1['fields']) + self.project_type.report_sections = list(filter(lambda s: s['id'] != 'section1', self.project_type.report_sections)) + self.project_type.save() + self.project.refresh_from_db() + + assert not self.project.sections.filter(section_id='section1').exists() + assert self.project.sections.get(section_id='section2').data['field1'] == old_value + + def test_move_field_to_other_section(self): + old_value = self.project.sections.get(section_id='section1').data['field1'] + section1 = next(filter(lambda s: s['id'] == 'section1', self.project_type.report_sections)) + section1['fields'].remove('field1') + section2 = next(filter(lambda s: s['id'] == 'section2', self.project_type.report_sections)) + section2['fields'].append('field1') + self.project_type.save() + self.project.refresh_from_db() + + assert self.project.sections.filter(section_id='section1').exists() + assert self.project.sections.get(section_id='section2').data['field1'] == old_value + diff --git a/api/src/reportcreator_api/tests/test_cvss.py b/api/src/reportcreator_api/tests/test_cvss.py new file mode 100644 index 0000000..a7f2cdf --- /dev/null +++ b/api/src/reportcreator_api/tests/test_cvss.py @@ -0,0 +1,50 @@ +import pytest + +from reportcreator_api.pentests import cvss + + +@pytest.mark.parametrize("vector,score", [ + (None, 0.0), + ('n/a', 0.0), + + ('CVSS:3.0/AV:N', 0.0), + ('CVSS:3.0/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L/XX:X', 0.0), + ('CVSS:3.0/AV:J/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L', 0.0), + ('CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:N', 0.0), + ('CVSS:3.0/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L', 4.6), + ('CVSS:3.0/AV:N/AC:H/PR:L/UI:R/S:C/C:L/I:L/A:L', 5.5), + ('CVSS:3.0/AV:A/AC:H/PR:N/UI:R/S:C/C:H/I:L/A:L', 7.0), + ('CVSS:3.0/AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H', 9.9), + ('CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H', 10.0), + ('CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H/E:P/RL:T/RC:U', 8.4), + ('CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H/E:P/RL:X/RC:U/CR:M/IR:H/AR:X/MAV:A/MAC:L/MPR:L/MUI:R/MS:U/MC:L/MI:L/MA:X', 5.7), + + ('CVSS:3.1/AV:N', 0.0), + ('CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L/XX:X', 0.0), + ('CVSS:3.1/AV:J/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L', 0.0), + ('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:N', 0.0), + ('CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:U/C:L/I:L/A:L', 4.6), + ('CVSS:3.1/AV:N/AC:H/PR:L/UI:R/S:C/C:L/I:L/A:L', 5.5), + ('CVSS:3.1/AV:A/AC:H/PR:N/UI:R/S:C/C:H/I:L/A:L', 7.0), + 
('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H', 10.0), + ('CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:L/A:N/CR:H', 10.0), + ('CVSS:3.1/AV:N/AC:L/PR:L/UI:N/S:C/C:H/I:H/A:H', 9.9), + ('CVSS:3.1/AV:N/AC:L/PR:L/UI:R/S:C/C:H/I:H/A:H', 9.0), + ('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H/E:P/RL:T/RC:U', 8.4), + ('CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:C/C:H/I:H/A:H/E:P/RL:X/RC:U/CR:M/IR:H/AR:X/MAV:A/MAC:L/MPR:L/MUI:R/MS:U/MC:L/MI:L/MA:X', 5.7), + + ('CVSS2#AV:N', 0.0), + ('CVSS2#AV:N/AC:M/Au:M/C:P/I:C/A:N/XX:X', 0.0), + ('CVSS2#AV:N/AC:M/Au:M/C:P/I:C/A:J', 0.0), + ('AV:N/AC:L/Au:N/C:N/I:N/A:N', 0.0), + ('CVSS2#AV:N/AC:M/Au:M/C:P/I:C/A:N', 6.4), + ('AV:N/AC:M/Au:M/C:P/I:C/A:N', 6.4), + ('AV:N/AC:L/Au:N/C:C/I:C/A:C', 10.0), + ('CVSS2#AV:N/AC:L/Au:N/C:P/I:N/A:N', 5.0), + ('AV:N/AC:M/Au:M/C:P/I:C/A:N/E:F/RL:TF/RC:C', 5.5), + ('AV:N/AC:M/Au:M/C:P/I:C/A:N/E:F/RL:TF/RC:C/CDP:MH/TD:L/CR:H/IR:M/AR:L', 1.9), + ('AV:N/AC:M/Au:M/C:P/I:C/A:N/E:F/RL:ND/RC:C/CDP:ND/TD:H/CR:H/IR:M/AR:ND', 6.4), +]) +def test_cvss(vector, score): + assert cvss.calculate_score(vector) == score + diff --git a/api/src/reportcreator_api/tests/test_import_export.py b/api/src/reportcreator_api/tests/test_import_export.py new file mode 100644 index 0000000..f8b6dd7 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_import_export.py @@ -0,0 +1,346 @@ +import pytest +import io +from django.core.files.base import ContentFile +from django.test import override_settings +from rest_framework.exceptions import ValidationError +from reportcreator_api.pentests.models import PentestProject, ProjectType, SourceEnum, UploadedAsset, UploadedImage, ProjectMemberRole +from reportcreator_api.tests.utils import assertKeysEqual +from reportcreator_api.archive.import_export import export_project_types, export_projects, export_templates, import_project_types, import_projects, import_templates +from reportcreator_api.tests.mock import create_notebookpage, create_project, create_project_type, create_template, create_user, create_finding + + +def archive_to_file(archive_iterator): + return io.BytesIO(b''.join(archive_iterator)) + + +def members_equal(a, b): + def format_members(m): + return sorted([(m['user'], set(m['roles'])) for m in a.values('user', 'roles')], key=lambda i: i[0]) + + return format_members(a) == format_members(b) + + +@pytest.mark.django_db +class TestImportExport: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.user = create_user() + self.template = create_template() + self.project_type = create_project_type() + self.project = create_project( + project_type=self.project_type, + members=[self.user], + report_data={'field_user': str(self.user.id)}, + findings_kwargs=[ + {'assignee': self.user, 'template': self.template}, + {'assignee': None, 'template': None}, + ], + notes_kwargs=[]) + note1 = create_notebookpage(project=self.project, title='Note 1', text='Note text 1') + create_notebookpage(project=self.project, parent=note1, title='Note 1.1', text='Note text 1.1') + + with override_settings(COMPRESS_IMAGES=False): + yield + + def test_export_import_template(self): + archive = archive_to_file(export_templates([self.template])) + imported = import_templates(archive) + + assert len(imported) == 1 + t = imported[0] + + assertKeysEqual(t, self.template, ['created', 'language', 'status', 'data', 'data_all']) + assert set(t.tags) == set(self.template.tags) + assert t.source == SourceEnum.IMPORTED + + def test_export_import_project_type(self): + archive = archive_to_file(export_project_types([self.project_type])) + 
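+        # export_project_types() yields the archive as a byte-chunk iterator and
+        # archive_to_file() collects it into a BytesIO; the instance is refreshed
+        # afterwards so the field comparison runs against the persisted state rather
+        # than anything the export step may have touched in memory.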
self.project_type.refresh_from_db() + imported = import_project_types(archive) + + assert len(imported) == 1 + t = imported[0] + + assertKeysEqual(t, self.project_type, [ + 'created', 'name', 'language', + 'report_fields', 'report_sections', 'finding_fields', 'finding_field_order', + 'report_template', 'report_styles', 'report_preview_data']) + assert t.source == SourceEnum.IMPORTED + + assert {(a.name, a.file.read()) for a in t.assets.all()} == {(a.name, a.file.read()) for a in self.project_type.assets.all()} + + def assert_export_import_project(self, project, p): + assertKeysEqual(p, project, ['name', 'language']) + assert members_equal(p.members, project.members) + assert p.data == project.data + assert p.data_all == project.data_all + assert p.source == SourceEnum.IMPORTED + + assert p.sections.count() == project.sections.count() + for i, s in zip(p.sections.order_by('section_id'), project.sections.order_by('section_id')): + assertKeysEqual(i, s, ['section_id', 'created', 'assignee', 'status', 'data']) + + assert p.findings.count() == project.findings.count() + for i, s in zip(p.findings.order_by('finding_id'), project.findings.order_by('finding_id')): + assertKeysEqual(i, s, ['finding_id', 'created', 'assignee', 'status', 'template', 'data', 'data_all']) + + assert {(i.name, i.file.read()) for i in p.images.all()} == {(i.name, i.file.read()) for i in project.images.all()} + + assertKeysEqual(p.project_type, project.project_type, [ + 'created', 'name', 'language', + 'report_fields', 'report_sections', 'finding_fields', 'finding_field_order', + 'report_template', 'report_styles', 'report_preview_data']) + assert p.project_type.source == SourceEnum.IMPORTED_DEPENDENCY + assert p.project_type.linked_project == p + + assert {(a.name, a.file.read()) for a in p.project_type.assets.all()} == {(a.name, a.file.read()) for a in project.project_type.assets.all()} + + def test_export_import_project(self): + archive = archive_to_file(export_projects([self.project])) + self.project.refresh_from_db() + imported = import_projects(archive) + assert len(imported) == 1 + p = imported[0] + self.assert_export_import_project(self.project, p) + assert p.notes.count() == 0 + assert p.files.count() == 0 + + def test_export_import_project_all(self): + archive = archive_to_file(export_projects([self.project], export_all=True)) + self.project.refresh_from_db() + imported = import_projects(archive) + assert len(imported) == 1 + p = imported[0] + self.assert_export_import_project(self.project, p) + + assert p.notes.count() == self.project.notes.count() + for i, s in zip(p.notes.order_by('note_id'), self.project.notes.order_by('note_id')): + assertKeysEqual(i, s, ['note_id', 'created', 'title', 'text', 'checked', 'icon_emoji', 'status_emoji', 'order']) + assert i.parent.note_id == s.parent.note_id if s.parent else i.parent is None + + assert {(f.name, f.file.read()) for f in p.files.all()} == {(f.name, f.file.read()) for f in self.project.files.all()} + + def test_import_nonexistent_user(self): + # export project with members and assignee, delete user, import => members and assignee == NULL + # export project with UserField, delete user, import => user inlined in project.imported_members + archive = archive_to_file(export_projects([self.project])) + old_user_id = self.user.id + old_user_roles = self.project.members.all()[0].roles + self.user.delete() + p = import_projects(archive)[0] + + assert p.members.count() == 0 + assert p.sections.exclude(assignee=None).count() == 0 + assert 
p.findings.exclude(assignee=None).count() == 0 + + # Check UUID of nonexistent user is still present in data + assert p.data_all == self.project.data_all + for i, s in zip(p.findings.order_by('created'), self.project.findings.order_by('created')): + assertKeysEqual(i, s, ['finding_id', 'created', 'assignee', 'template', 'data', 'data_all']) + + # Test nonexistent user is added to project.imported_members + assert len(p.imported_members) == 1 + assert p.imported_members[0]['id'] == str(old_user_id) + assert p.imported_members[0]['roles'] == old_user_roles + assertKeysEqual(p.imported_members[0], self.user, [ + 'email', 'phone', 'mobile', + 'name', 'title_before', 'first_name', 'middle_name', 'last_name', 'title_after', + ]) + + # Test re-create user: at re-import the original user should be referenced + archive2 = archive_to_file(export_projects([p])) + self.user.id = old_user_id + self.user.save() + p2 = import_projects(archive2)[0] + assert p2.members.count() == 1 + assert len(p2.imported_members) == 0 + members_equal(p2.members, self.project.members) + + def test_import_nonexistent_template_reference(self): + archive = archive_to_file(export_projects([self.project])) + self.template.delete() + p = import_projects(archive)[0] + + assert p.findings.exclude(template_id=None).count() == 0 + + def test_import_wrong_archive(self): + archive = archive_to_file(export_templates([self.template])) + with pytest.raises(ValidationError): + import_projects(archive) + + +@pytest.mark.django_db +class TestLinkedProject: + @pytest.fixture(autouse=True) + def setUp(self): + self.project_type = create_project_type(source=SourceEnum.IMPORTED_DEPENDENCY) + self.project = create_project(project_type=self.project_type, source=SourceEnum.IMPORTED) + self.project_type.linked_project = self.project + self.project_type.save() + + def test_delete_linked_project(self): + # On delete linked_project: project_type should also be deleted + self.project.delete() + assert not ProjectType.objects.filter(id=self.project_type.id).exists() + + def test_delete_linked_project_multiple_project_types(self): + # On delete linked_project + unused_pt = create_project_type(linked_project=self.project, source=SourceEnum.IMPORTED_DEPENDENCY) + + self.project.delete() + assert not ProjectType.objects.filter(id=self.project_type.id).exists() + assert not ProjectType.objects.filter(id=unused_pt.id).exists() + + def test_delete_linked_project_project_type_used_by_another_project(self): + second_p = create_project(project_type=self.project_type) + + self.project.delete() + assert ProjectType.objects.filter(id=self.project_type.id).exists() + assert PentestProject.objects.filter(id=second_p.id).exists() + self.project_type.refresh_from_db() + assert self.project_type.linked_project is None + + +@pytest.mark.django_db +class TestFileDelete: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + p = create_project() + self.image = p.images.first() + self.asset = p.project_type.assets.first() + + def assertFileExists(self, file, expected): + exists = False + try: + with file.open(): + exists = True + except ValueError: + exists = False + assert exists == expected + + def test_delete_file_referenced_only_once(self): + self.image.delete() + self.assertFileExists(self.image.file, False) + + self.asset.delete() + self.assertFileExists(self.asset.file, False) + + def test_delete_file_referenced_multiple_times(self): + UploadedImage.objects.create(linked_object=self.image.linked_object, name='new.png', file=self.image.file) + 
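# a second DB record now references the same file, so deleting one record must keep the file on storage + 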
self.image.delete() + self.assertFileExists(self.image.file, True) + + UploadedAsset.objects.create(linked_object=self.asset.linked_object, name='new.png', file=self.asset.file) + self.asset.delete() + self.assertFileExists(self.asset.file, True) + + def test_delete_copied_images(self): + p = create_project() + p2 = p.copy() + + images = list(p.images.order_by('name_hash')) + for o, c in zip(images, p2.images.order_by('name_hash')): + assert o.file == c.file + p.delete() + for i in images: + self.assertFileExists(i.file, True) + + def test_delete_copied_assets(self): + t = create_project_type() + t2 = t.copy() + + assets = list(t.assets.order_by('name_hash')) + for o, c in zip(assets, t2.assets.order_by('name_hash')): + assert o.file == c.file + t.delete() + for a in assets: + self.assertFileExists(a.file, True) + + +@pytest.mark.django_db +class TestCopyModel: + def assert_project_type_copy_equal(self, pt, cp, exclude_fields=[]): + assert pt != cp + assert not cp.is_locked + assertKeysEqual(pt, cp, { + 'name', 'language', 'linked_project', + 'report_template', 'report_styles', 'report_preview_data', + 'report_fields', 'report_sections', 'finding_fields', 'finding_field_order', + } - set(exclude_fields)) + + assert set(pt.assets.values_list('id', flat=True)).intersection(cp.assets.values_list('id', flat=True)) == set() + assert {(a.name, a.file.read()) for a in pt.assets.all()} == {(a.name, a.file.read()) for a in cp.assets.all()} + + def test_copy_project(self): + user = create_user() + p = create_project(members=[user], readonly=True, source=SourceEnum.IMPORTED) + create_notebookpage(project=p, parent=p.notes.first()) + finding = create_finding(project=p, template=create_template()) + finding.lock(user) + p.sections.first().lock(user) + cp = p.copy() + + assert p != cp + assert not cp.readonly + assertKeysEqual(p, cp, [ + 'name', 'source', 'language', 'imported_members', 'data_all' + ]) + self.assert_project_type_copy_equal(p.project_type, cp.project_type, exclude_fields=['source', 'linked_project']) + assert cp.project_type.source == SourceEnum.SNAPSHOT + assert cp.project_type.linked_project == cp + assert members_equal(p.members, cp.members) + + assert set(p.images.values_list('id', flat=True)).intersection(cp.images.values_list('id', flat=True)) == set() + assert {(i.name, i.file.read()) for i in p.images.all()} == {(i.name, i.file.read()) for i in cp.images.all()} + + assert set(p.files.values_list('id', flat=True)).intersection(cp.files.values_list('id', flat=True)) == set() + assert {(f.name, f.file.read()) for f in p.files.all()} == {(f.name, f.file.read()) for f in cp.files.all()} + + for p_s, cp_s in zip(p.sections.order_by('section_id'), cp.sections.order_by('section_id')): + assert p_s != cp_s + assertKeysEqual(p_s, cp_s, ['section_id', 'assignee', 'data']) + assert not cp_s.is_locked + + for p_f, cp_f in zip(p.findings.order_by('finding_id'), cp.findings.order_by('finding_id')): + assert p_f != cp_f + assertKeysEqual(p_f, cp_f, ['finding_id', 'assignee', 'data', 'template']) + assert not cp_f.is_locked + + for p_n, cp_n in zip(p.notes.order_by('note_id'), cp.notes.order_by('note_id')): + assert p_n != cp_n + assertKeysEqual(p_n, cp_n, ['note_id', 'title', 'text', 'emoji', 'order']) + assert not cp_f.is_locked + if p_n.parent: + assert p_n.parent.note_id == cp_n.parent.note_id + assert p_n.parent != cp_n.parent + else: + assert cp_n.parent is None + + def test_copy_project_type(self): + user = create_user() + project = create_project() + pt = 
create_project_type(source=SourceEnum.IMPORTED, linked_project=project) + pt.lock(user) + cp = pt.copy() + + self.assert_project_type_copy_equal(pt, cp) + + +@pytest.mark.parametrize('original,cleaned', [ + ('test.txt', 'test.txt'), + # Attacks + ('te\x00st.txt', 'te-st.txt'), + ('te/st.txt', 'st.txt'), + ('t/../../../est.txt', 'est.txt'), + ('../test1.txt', 'test1.txt'), + ('..', 'file'), + # Markdown conflicts + ('/test2.txt', 'test2.txt'), + ('t**es**t.txt', 't--es--t.txt'), + ('te_st_.txt', 'te-st-.txt'), + ('t![e]()st.txt', 't--e---st.txt'), +]) +@pytest.mark.django_db +def test_uploadedfile_filename(original, cleaned): + actual_name = UploadedAsset.objects.create(name=original, file=ContentFile(b'test'), linked_object=create_project_type()).name + assert actual_name == cleaned \ No newline at end of file diff --git a/api/src/reportcreator_api/tests/test_license.py b/api/src/reportcreator_api/tests/test_license.py new file mode 100644 index 0000000..35439a1 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_license.py @@ -0,0 +1,252 @@ +import json +import pytest +from uuid import uuid4 +from django.test import override_settings +from Cryptodome.Signature import eddsa +from Cryptodome.PublicKey import ECC +from Cryptodome.Hash import SHA512 +from base64 import b64decode, b64encode +from datetime import timedelta +from django.conf import settings +from django.urls import reverse +from django.utils import timezone +from unittest import mock +from rest_framework.test import APIClient +from rest_framework import status +from django.utils.crypto import get_random_string + +from reportcreator_api.utils import license +from reportcreator_api.tests.mock import create_project, create_public_key, create_user, api_client + + +def assert_api_license_error(res): + assert res.status_code == status.HTTP_403_FORBIDDEN + assert res.data['code'] == 'license' + + +@pytest.mark.django_db +class TestCommunityLicenseRestrictions: + @pytest.fixture(autouse=True) + def setUp(self): + self.password = get_random_string(length=32) + self.user = create_user(is_superuser=True, password=self.password) + self.user_regular = create_user(password=self.password) + self.user_system = create_user(is_system_user=True, password=self.password) + self.client = api_client(self.user) + + with mock.patch('reportcreator_api.utils.license.check_license', lambda: {'type': license.LicenseType.COMMUNITY, 'users': 2, 'error': None}): + yield + + def test_spellcheck_disabled(self): + assert self.client.get(reverse('utils-settings')).data['features']['spellcheck'] is False + assert_api_license_error(self.client.post(reverse('utils-spellcheck'))) + assert_api_license_error(self.client.post(reverse('utils-spellcheck-words'))) + + def test_admin_privesc_disabled(self): + assert self.user.is_admin + assert 'admin' in self.client.get(reverse('pentestuser-self')).data['scope'] + assert_api_license_error(self.client.post(reverse('pentestuser-disable-admin-permissions'))) + assert_api_license_error( self.client.post(reverse('pentestuser-enable-admin-permissions'))) + + def test_backup_api_disabled(self): + self.client.force_authenticate(self.user_system) + assert_api_license_error(self.client.post(reverse('utils-backup'), data={'key': settings.BACKUP_KEY})) + + def test_archiving_disabled(self): + public_key = create_public_key(user=self.user) + project = create_project(members=[self.user]) + assert_api_license_error(self.client.post(reverse('userpublickey-list', kwargs={'pentestuser_pk': 'self'}), data={'name': 'test', 'public_key': 
public_key.public_key})) + assert_api_license_error(self.client.post(reverse('pentestproject-archive', kwargs={'pk': project.pk}))) + + def test_prevent_login_of_nonsuperusers(self): + self.client.force_authenticate(None) + assert_api_license_error(self.client.post(reverse('auth-login'), data={ + 'username': self.user_regular.username, + 'password': self.password + })) + + def test_prevent_login_of_system_users(self): + assert_api_license_error(self.client.post(reverse('auth-login'), data={ + 'username': self.user_system.username, + 'password': self.password, + })) + + def test_prevent_create_non_superusers(self): + self.user_regular.delete() + assert_api_license_error(self.client.post(reverse('pentestuser-list'), data={ + 'username': 'new-user1', + 'password': self.password, + 'is_superuser': False, + })) + + assert self.client.post(reverse('pentestuser-list'), data={ + 'username': 'new-user2', + 'password': self.password, + 'is_superuser': True, + }).status_code == 201 + + def test_prevent_create_system_users(self): + with pytest.raises(license.LicenseError): + create_user(is_superuser=True, is_system_user=True) + + def test_user_count_limit(self): + # Fill max number of superusers + self.user_system.is_system_user = False + self.user_system.is_superuser = True + self.user_system.save() + + # Create user: Try to exceed limit by creating new superusers + with pytest.raises(license.LicenseLimitExceededError): + create_user(is_superuser=True) + assert_api_license_error(self.client.post(reverse('pentestuser-list'), data={ + 'username': 'new-user3', + 'password': self.password, + 'is_superuser': True + })) + + # Update is_superuser: Try to exceed limit by making existing users superusers + with pytest.raises(license.LicenseError): + self.user_regular.is_superuser = True + self.user_regular.save() + assert_api_license_error(self.client.patch(reverse('pentestuser-detail', kwargs={'pk': self.user_regular.pk}), data={'is_superuser': True})) + + # Disable user: should be allowed + self.user_regular.is_active = False + self.user_regular.is_superuser = True + self.user_regular.save() + + # Update is_active: Try to exceed limit by enabling disabled superusers + with pytest.raises(license.LicenseError): + self.user_regular.is_active = True + self.user_regular.save() + + +@pytest.mark.django_db +class TestProfessionalLicenseRestrictions: + @pytest.fixture(autouse=True) + def setUp(self): + self.password = get_random_string(length=32) + self.user = create_user(is_user_manager=True, password=self.password) + self.client = APIClient() + self.client.force_authenticate(self.user) + + with mock.patch('reportcreator_api.utils.license.check_license', lambda: {'type': license.LicenseType.PROFESSIONAL, 'users': 1, 'error': None}): + yield + + def test_user_count_limit(self): + with pytest.raises(license.LicenseLimitExceededError): + create_user(username='new-user1', password=self.password) + assert_api_license_error(self.client.post(reverse('pentestuser-list'), data={ + 'username': 'new-user2', + 'password': self.password, + })) + + +@pytest.mark.django_db +class TestLicenseValidation: + @pytest.fixture(autouse=True) + def setUp(self): + self.license_private_key, self.license_public_key = self.generate_signing_key() + with override_settings(LICENSE_VALIDATION_KEYS=[self.license_public_key]): + yield + + def generate_signing_key(self): + private_key = ECC.generate(curve='ed25519') + public_key = { + 'id': str(uuid4()), + 'algorithm': 'ed25519', + 'key': 
b64encode(private_key.public_key().export_key(format='DER')).decode() + } + return private_key, public_key + + def sign_license_data(self, license_data_str: str, public_key: dict, private_key): + signer = eddsa.new(key=private_key, mode='rfc8032') + signature = signer.sign(SHA512.new(license_data_str.encode())) + return { + 'key_id': public_key['id'], + 'algorithm': public_key['algorithm'], + 'signature': b64encode(signature).decode(), + } + + def sign_license(self, license_data, keys): + license_data_str = json.dumps(license_data) + return b64encode(json.dumps({ + 'data': license_data_str, + 'signatures': [self.sign_license_data(license_data_str, k[0], k[1]) for k in keys] + }).encode()).decode() + + def signed_license(self, **kwargs): + return self.sign_license({ + 'users': 10, + 'valid_from': (timezone.now() - timedelta(days=30)).date().isoformat(), + 'valid_until': (timezone.now() + timedelta(days=30)).date().isoformat(), + } | kwargs, [(self.license_public_key, self.license_private_key)]) + + @pytest.mark.parametrize('license_str,error', [ + (None, None), + ('', None), + ('asdf', 'load'), + (b64encode(b'asdf'), 'load'), + (b64encode(json.dumps({'data': '{"valid_from": "2000-01-01", "valid_to": "3000-01-01", "users": 10}', 'signatures': []}).encode()), 'no valid signature'), # Missing signatures + ]) + def test_invalid_license_format(self, license_str, error): + license_info = license.decode_and_validate_license(license_str) + assert (license_info['type'] == license.LicenseType.PROFESSIONAL) is False + if error: + assert error in license_info['error'].lower() + else: + assert error is None + + @pytest.mark.parametrize('valid,license_data,error', [ + (False, {'valid_from': '3000-01-01'}, 'not yet valid'), + (False, {'valid_until': '2000-01-1'}, 'expired'), + (False, {'users': -10}, 'user count'), + (False, {'users': 0}, 'user count'), + (True, {}, None), + ]) + def test_license_validation(self, valid, license_data, error): + license_info = license.decode_and_validate_license(self.signed_license(**license_data)) + assert (license_info['type'] == license.LicenseType.PROFESSIONAL) is valid + if not valid: + assert error in license_info['error'].lower() + else: + assert not license_info['error'] + + def test_user_limit_exceeded(self): + create_user() + create_user() + + license_info = license.decode_and_validate_license(self.signed_license(users=1)) + assert license_info['type'] != license.LicenseType.PROFESSIONAL + assert 'limit exceeded' in license_info['error'] + + def test_invalid_signature(self): + license_data = json.dumps({ + 'users': 10, + 'valid_from': '2000-01-01', + 'valid_until': '3000-01-01', + }) + signer = eddsa.new(key=ECC.generate(curve='ed25519'), mode='rfc8032') + signature = signer.sign(SHA512.new(license_data.encode())) + license_info = license.decode_and_validate_license(b64encode(json.dumps({ + 'data': license_data, + 'signatures': [{ + 'key_id': self.license_public_key['id'], + 'algorithm': self.license_public_key['algorithm'], + 'signature': b64encode(signature).decode(), + }] + }).encode()).decode()) + assert license_info['type'] != license.LicenseType.PROFESSIONAL + assert 'no valid signature' in license_info['error'].lower() + + def test_multiple_signatures_only_1_valid(self): + license_1 = self.signed_license() + license_content = json.loads(b64decode(license_1)) + license_content['signatures'].append({ + 'key_id': str(uuid4()), + 'algorithm': 'ed25519', + 'signature': b64encode(eddsa.new(key=ECC.generate(curve='ed25519'), 
mode='rfc8032').sign(SHA512.new(license_content['data'].encode()))).decode(), + }) + license_2 = b64encode(json.dumps(license_content).encode()) + license_info = license.decode_and_validate_license(license_2) + assert license_info['type'] == license.LicenseType.PROFESSIONAL diff --git a/api/src/reportcreator_api/tests/test_locking.py b/api/src/reportcreator_api/tests/test_locking.py new file mode 100644 index 0000000..6dd80b3 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_locking.py @@ -0,0 +1,67 @@ +import pytest +from django.conf import settings +from django.urls import reverse +from rest_framework.test import APIClient +from reportcreator_api.pentests.models import LockStatus +from reportcreator_api.tests.mock import create_project, create_user, mock_time + + +@pytest.mark.django_db +class TestLocking: + @pytest.fixture(autouse=True) + def setUp(self) -> None: + self.user1 = create_user() + self.user2 = create_user() + self.project = create_project(members=[self.user1, self.user2]) + self.finding = self.project.findings.first() + self.section = self.project.sections.first() + + def test_locking(self): + assert self.finding.lock(user=self.user1) == LockStatus.CREATED + assert self.finding.is_locked + assert self.finding.lock_info_data.user == self.user1 + assert self.finding.lock(user=self.user1) == LockStatus.REFRESHED + assert self.finding.lock(user=self.user2) == LockStatus.FAILED + assert self.finding.unlock(user=self.user2) == False + assert self.finding.unlock(user=self.user1) == True + assert self.finding.lock(user=self.user2) == LockStatus.CREATED + + with mock_time(after=settings.MAX_LOCK_TIME * 2): + assert not self.finding.is_locked + assert self.finding.lock(user=self.user2) == LockStatus.CREATED + + def assert_api_locking(self, obj, url_basename, url_kwargs): + client_u1 = APIClient() + client_u1.force_authenticate(user=self.user1) + client_u2 = APIClient() + client_u2.force_authenticate(user=self.user2) + + # Lock and update + assert client_u1.post(reverse(url_basename + '-lock', kwargs=url_kwargs)).status_code == 201 + obj = obj.__class__.objects.get(pk=obj.pk) + assert obj.is_locked + assert obj.lock_info_data.user == self.user1 + assert client_u1.post(reverse(url_basename + '-lock', kwargs=url_kwargs)).status_code == 200 + assert client_u1.patch(reverse(url_basename + '-detail', kwargs=url_kwargs), data={}).status_code == 200 + + # Other user + assert client_u2.patch(reverse(url_basename + '-detail', kwargs=url_kwargs), data={}).status_code == 403 + assert client_u2.post(reverse(url_basename + '-lock', kwargs=url_kwargs)).status_code == 403 + assert client_u2.post(reverse(url_basename + '-unlock', kwargs=url_kwargs)).status_code == 403 + + # Unlock + assert client_u1.post(reverse(url_basename + '-unlock', kwargs=url_kwargs)).status_code == 200 + obj = obj.__class__.objects.get(pk=obj.pk) + assert not obj.is_locked + + # Update without locking + assert client_u2.patch(reverse(url_basename + '-detail', kwargs=url_kwargs), data={}).status_code == 200 + obj = obj.__class__.objects.get(pk=obj.pk) + assert not obj.is_locked + + def test_api_lock_finding(self): + self.assert_api_locking(obj=self.finding, url_basename='finding', url_kwargs={'project_pk': self.project.pk, 'finding_id': self.finding.finding_id}) + + def test_api_lock_section(self): + self.assert_api_locking(obj=self.section, url_basename='section', url_kwargs={'project_pk': self.project.pk, 'section_id': self.section.section_id}) + diff --git a/api/src/reportcreator_api/tests/test_model_diff.py 
b/api/src/reportcreator_api/tests/test_model_diff.py new file mode 100644 index 0000000..bd1a88f --- /dev/null +++ b/api/src/reportcreator_api/tests/test_model_diff.py @@ -0,0 +1,48 @@ +import pytest +from pytest_django.asserts import assertNumQueries + +from reportcreator_api.pentests.models import PentestProject +from .mock import create_project_type, create_project + + +@pytest.mark.django_db +def test_model_diff(): + project = create_project() + + p = PentestProject.objects.get(id=project.id) + p.name = 'changed' + p.update_data({'title': 'changed'}) + + assert p.has_changed + assert set(p.changed_fields) == {'name', 'custom_fields'} + assert p.get_field_diff('name') == (project.name, p.name) + assert p.get_field_diff('custom_fields'), (project.custom_fields, p.custom_fields) + + +@pytest.mark.django_db +def test_diff_related(): + project_type = create_project_type() + project_type2 = create_project_type() + project = create_project(project_type=project_type) + + p = PentestProject.objects.get(id=project.id) + p.project_type = project_type2 + assert p.has_changed + assert set(p.changed_fields) == {'project_type_id'} + assert p.get_field_diff('project_type_id') == (project_type.id, project_type2.id) + + +@pytest.mark.django_db +def test_diff_deferred_fields(): + project = create_project() + + # Deferred fields should not cause DB queries + with assertNumQueries(1): + p = PentestProject.objects.only('id', 'readonly').get(id=project.id) + + # Changes on deferred fields are not detected + p.name = 'changed' # write deferred + assert not p.has_changed + # Changes on non-deferred fields are detected + p.readonly = True + assert p.has_changed diff --git a/api/src/reportcreator_api/tests/test_notifications.py b/api/src/reportcreator_api/tests/test_notifications.py new file mode 100644 index 0000000..82ee9e3 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_notifications.py @@ -0,0 +1,145 @@ +import pytest +import uuid +from unittest import mock +from asgiref.sync import async_to_sync +from datetime import timedelta +from django.test import override_settings +from django.utils import timezone +from reportcreator_api.notifications.tasks import fetch_notifications + +from reportcreator_api.tests.mock import create_user +from reportcreator_api.tests.utils import assertKeysEqual +from reportcreator_api.users.models import PentestUser +from reportcreator_api.notifications.models import NotificationSpec + + +@pytest.mark.django_db +class TestNotifications: + @pytest.fixture(autouse=True) + def setUp(self): + self.user_regular = create_user(username='regular') + self.user_template_editor = create_user(username='template_editor', is_template_editor=True) + self.user_designer = create_user(username='designer', is_designer=True) + self.user_user_manager = create_user(username='user_manager', is_user_manager=True) + self.user_superuser = create_user(username='superuser', is_superuser=True) + + @pytest.mark.parametrize('notification,expected_users', [ + (NotificationSpec(), ['regular', 'template_editor', 'designer', 'user_manager', 'superuser']), + (NotificationSpec(active_until=(timezone.now() - timedelta(days=10)).date()), []), + (NotificationSpec(user_conditions={'is_superuser': True}), ['superuser']), + (NotificationSpec(user_conditions={'is_superuser': False}), ['regular', 'template_editor', 'designer', 'user_manager']), + (NotificationSpec(user_conditions={'is_user_manager': True}), ['user_manager']), + (NotificationSpec(user_conditions={'is_designer': True}), ['designer']), + 
(NotificationSpec(user_conditions={'is_template_editor': True}), ['template_editor']), + (NotificationSpec(user_conditions={'is_superuser': False, 'is_user_manager': False, 'is_designer': False, 'is_template_editor': False}), ['regular']), + ]) + def test_user_conditions(self, notification, expected_users): + # Test queryset filter + assert set(NotificationSpec.objects.users_for_notification(notification).values_list('username', flat=True)) == set(expected_users) + + # Assigned to correct users + notification.save() + assert set(notification.usernotification_set.values_list('user__username', flat=True)) == set(expected_users) + + # Reverse filter + for u in PentestUser.objects.filter(username__in=expected_users): + assert notification in NotificationSpec.objects.notifications_for_user(u) + + @pytest.mark.parametrize('expected,notification,instance_settings', [ + (True, NotificationSpec(), {}), + + (True, NotificationSpec(instance_conditions={'any_tag': ['test1']}), {'INSTANCE_TAGS': ['test1']}), + (True, NotificationSpec(instance_conditions={'any_tag': ['test1']}), {'INSTANCE_TAGS': ['test1', 'test2']}), + (True, NotificationSpec(instance_conditions={'any_tag': ['test1', 'other']}), {'INSTANCE_TAGS': ['test1', 'test2']}), + (False, NotificationSpec(instance_conditions={'any_tag': ['other']}), {'INSTANCE_TAGS': ['test1', 'test2']}), + + (True, NotificationSpec(instance_conditions={'version': '1.0'}), {'VERSION': '1.0'}), + (True, NotificationSpec(instance_conditions={'version': '==1.0'}), {'VERSION': '1.0'}), + + (True, NotificationSpec(instance_conditions={'version': '>=1.0'}), {'VERSION': '1.5'}), + (True, NotificationSpec(instance_conditions={'version': '>=1.0'}), {'VERSION': '1.0'}), + (True, NotificationSpec(instance_conditions={'version': '>=1.0'}), {'VERSION': '2.1'}), + (False, NotificationSpec(instance_conditions={'version': '>=1.0'}), {'VERSION': '0.9'}), + + (True, NotificationSpec(instance_conditions={'version': '<=1.0'}), {'VERSION': '0.9.7'}), + (True, NotificationSpec(instance_conditions={'version': '<=1.0'}), {'VERSION': '1.0'}), + (False, NotificationSpec(instance_conditions={'version': '<=1.0'}), {'VERSION': 'dev'}), + (False, NotificationSpec(instance_conditions={'version': '<=2'}), {'VERSION': '10.1'}), + + (True, NotificationSpec(instance_conditions={'version': '>1.0'}), {'VERSION': '1.5'}), + (True, NotificationSpec(instance_conditions={'version': '>1.1'}), {'VERSION': '2.1'}), + (False, NotificationSpec(instance_conditions={'version': '>1.0'}), {'VERSION': '0.9'}), + (False, NotificationSpec(instance_conditions={'version': '>1.1'}), {'VERSION': '1.1'}), + + (True, NotificationSpec(instance_conditions={'version': '<1.0'}), {'VERSION': '0.9.7'}), + (False, NotificationSpec(instance_conditions={'version': '<1.0'}), {'VERSION': '1.0'}), + (False, NotificationSpec(instance_conditions={'version': '<1.0'}), {'VERSION': 'dev'}), + (False, NotificationSpec(instance_conditions={'version': '<2'}), {'VERSION': '10.1'}), + + (True, NotificationSpec(instance_conditions={'version': 'dev'}), {'VERSION': 'dev'}), + (False, NotificationSpec(instance_conditions={'version': 'prod'}), {'VERSION': 'dev'}), + ]) + def test_instance_conditions(self, expected, notification, instance_settings): + with override_settings(**instance_settings): + # Test filter + assert NotificationSpec.objects.check_instance_conditions(notification) == expected + + # Test assigned to users + notification.save() + assert notification.usernotification_set.exists() == expected + + def test_visible_for(self): + assert 
NotificationSpec.objects.create(visible_for_days=10).usernotification_set.first().visible_until.date() == (timezone.now() + timedelta(days=10)).date() + assert NotificationSpec.objects.create(visible_for_days=None).usernotification_set.first().visible_until is None + + +@pytest.mark.django_db +class TestNotificationImport: + @pytest.fixture(autouse=True) + def setUp(self): + self.user_notification = create_user(is_superuser=True) + self.user_no_notification = create_user() + + self.notification_import_data = [ + { + "id": uuid.UUID("fb0f0d11-41d1-4df7-9807-8d77b979adeb"), + "created": "2023-01-26T10:27:07.517334Z", + "updated": "2023-01-26T10:27:07.522920Z", + "active_until": None, + "visible_for_days": 14, + "instance_conditions": {}, + "user_conditions": { + "is_superuser": True + }, + "title": "Test", + "text": "Test", + "link_url": "" + } + ] + async def mock_fetch_notifications_request(): + return self.notification_import_data + with mock.patch('reportcreator_api.notifications.tasks.fetch_notifications_request', mock_fetch_notifications_request), \ + override_settings(NOTIFICATION_IMPORT_URL='https://example.com/'): + yield + + def test_create(self): + async_to_sync(fetch_notifications)(None) + n = NotificationSpec.objects.get() + assertKeysEqual(n, self.notification_import_data[0], ['id', 'title', 'text', 'link_url', + 'active_until', 'visible_for_days', 'instance_conditions', 'user_conditions']) + assert self.user_notification.notifications.get().notification == n + assert self.user_no_notification.notifications.count() == 0 + + def test_refetch(self): + async_to_sync(fetch_notifications)(None) + before = NotificationSpec.objects.get() + async_to_sync(fetch_notifications)(None) + after = NotificationSpec.objects.get() + assertKeysEqual(before, after, ['id', 'created', 'updated', 'active_until']) + + def test_delete(self): + async_to_sync(fetch_notifications)(None) + self.notification_import_data = [] + async_to_sync(fetch_notifications)(None) + after = NotificationSpec.objects.get() + assert after.active_until < timezone.now().date() \ No newline at end of file diff --git a/api/src/reportcreator_api/tests/test_periodic_tasks.py b/api/src/reportcreator_api/tests/test_periodic_tasks.py new file mode 100644 index 0000000..2907f15 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_periodic_tasks.py @@ -0,0 +1,329 @@ +import pytest +from asgiref.sync import async_to_sync +from datetime import timedelta +from unittest import mock +from pytest_django.asserts import assertNumQueries +from django.test import override_settings +from django.urls import reverse +from django.utils import timezone +from rest_framework.test import APIClient +from reportcreator_api.pentests.tasks import cleanup_project_files, cleanup_unreferenced_images_and_files, cleanup_usernotebook_files, reset_stale_archive_restores + +from reportcreator_api.tasks.models import PeriodicTask, TaskStatus +from reportcreator_api.tests.mock import create_archived_project, create_project, create_user, mock_time + + +def task_success(): + pass + + +def task_failure(): + raise Exception('Failed task') + + +@pytest.mark.django_db +class TestPeriodicTaskScheduling: + @pytest.fixture(autouse=True) + def setUp(self): + with mock.patch('reportcreator_api.tests.test_periodic_tasks.task_success') as self.mock_task_success, \ + mock.patch('reportcreator_api.tests.test_periodic_tasks.task_failure', side_effect=Exception) as self.mock_task_failure, \ + override_settings(PERIODIC_TASKS=[ + { + 'id': 'task_success', + 'task': 
'reportcreator_api.tests.test_periodic_tasks.task_success', + 'schedule': timedelta(days=1), + }, + { + 'id': 'task_failure', + 'task': 'reportcreator_api.tests.test_periodic_tasks.task_failure', + + } + ]): + yield + + def run_tasks(self): + res = APIClient().get(reverse('utils-healthcheck')) + assert res.status_code == 200 + + def test_initial_run(self): + self.run_tasks() + assert PeriodicTask.objects.all().count() == 2 + assert PeriodicTask.objects.get(id='task_success').status == TaskStatus.SUCCESS + assert PeriodicTask.objects.get(id='task_failure').status == TaskStatus.FAILED + assert self.mock_task_success.call_count == 1 + assert self.mock_task_failure.call_count == 1 + + def test_not_rerun_until_schedule(self): + prev = PeriodicTask.objects.create(id='task_success', status=TaskStatus.SUCCESS, started=timezone.now(), completed=timezone.now()) + self.run_tasks() + t = PeriodicTask.objects.get(id='task_success') + assert t.status == TaskStatus.SUCCESS + assert t.started == prev.started + assert not self.mock_task_success.called + + def test_rerun_after_schedule(self): + PeriodicTask.objects.create(id='task_success', status=TaskStatus.SUCCESS, started=timezone.now() - timedelta(days=2), completed=timezone.now()- timedelta(days=2)) + start_time = timezone.now() + self.run_tasks() + t = PeriodicTask.objects.get(id='task_success') + assert t.status == TaskStatus.SUCCESS + assert t.started > start_time + assert t.completed > start_time + assert self.mock_task_success.call_count == 1 + + def test_retry(self): + PeriodicTask.objects.create(id='task_failure', status=TaskStatus.FAILED, started=timezone.now() - timedelta(hours=2), completed=timezone.now()- timedelta(hours=2)) + start_time = timezone.now() + self.run_tasks() + t = PeriodicTask.objects.get(id='task_failure') + assert t.status == TaskStatus.FAILED + assert t.started > start_time + assert t.completed > start_time + assert self.mock_task_failure.call_count == 1 + + def test_running_not_scheduled(self): + running = PeriodicTask.objects.create(id='task_success', status=TaskStatus.RUNNING, started=timezone.now()) + self.run_tasks() + t = PeriodicTask.objects.get(id='task_success') + assert t.status == TaskStatus.RUNNING + assert t.started == running.started + assert t.completed == running.completed + assert not self.mock_task_success.called + + def test_running_timeout_retry(self): + PeriodicTask.objects.create(id='task_success', status=TaskStatus.RUNNING, started=timezone.now() - timedelta(hours=2)) + start_time = timezone.now() + self.run_tasks() + t = PeriodicTask.objects.get(id='task_success') + assert t.status == TaskStatus.SUCCESS + assert t.started > start_time + assert t.completed > start_time + assert self.mock_task_success.call_count == 1 + + def test_db_query_performance(self): + self.run_tasks() + + with assertNumQueries(1): + async_to_sync(PeriodicTask.objects.run_all_pending_tasks)() + + +@pytest.mark.django_db +class TestCleanupUnreferencedFiles: + def file_exists(self, file_obj): + try: + file_obj.file.read() + return True + except FileNotFoundError: + return False + + def run_cleanup_project_files(self, num_queries, last_success=None): + with assertNumQueries(num_queries): + async_to_sync(cleanup_project_files)(task_info={ + 'model': PeriodicTask(last_success=last_success) + }) + + def run_cleanup_user_files(self, num_queries, last_success=None): + with assertNumQueries(num_queries): + async_to_sync(cleanup_usernotebook_files)(task_info={ + 'model': PeriodicTask(last_success=last_success) + }) + + def 
test_unreferenced_files_removed(self): + with mock_time(before=timedelta(days=10)): + project = create_project( + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + project_image = project.images.first() + project_file = project.files.first() + user = create_user( + images_kwargs=[{'name': 'image.png'}], + ) + user_image = user.images.first() + # self.run_cleanup(num_queries=2 + 6 + 3 * 2 + 3) + self.run_cleanup_project_files(num_queries=1 + 4 + 2 * 2 + 2 * 1) + self.run_cleanup_user_files(num_queries=1 + 2 + 1 * 2 + 1 * 1) + # Deleted from DB + assert project.images.count() == 0 + assert project.files.count() == 0 + assert user.images.count() == 0 + # Deleted from FS + assert not self.file_exists(project_image) + assert not self.file_exists(project_file) + assert not self.file_exists(user_image) + + def test_recently_created_unreferenced_files_not_removed(self): + project = create_project( + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + user = create_user( + images_kwargs=[{'name': 'image.png'}] + ) + self.run_cleanup_project_files(num_queries=1) + self.run_cleanup_user_files(num_queries=1) + # DB objects exist + assert project.images.count() == 1 + assert project.files.count() == 1 + assert user.images.count() == 1 + # Files exist + assert self.file_exists(project.images.first()) + assert self.file_exists(project.files.first()) + assert self.file_exists(user.images.first()) + + def test_referenced_files_in_section_not_removed(self): + with mock_time(before=timedelta(days=10)): + project = create_project( + report_data={'field_markdown': '![](/images/name/image.png)\n[](/files/name/file.pdf)'}, + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + self.run_cleanup_project_files(num_queries=1 + 4) + assert project.images.count() == 1 + assert project.files.count() == 1 + + def test_referenced_files_in_finding_not_removed(self): + with mock_time(before=timedelta(days=10)): + project = create_project( + findings_kwargs=[{'data': {'description': '![](/images/name/image.png)\n[](/files/name/file.pdf)'}}], + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + self.run_cleanup_project_files(num_queries=1 + 4) + assert project.images.count() == 1 + assert project.files.count() == 1 + + def test_referenced_files_in_notes_not_removed(self): + with mock_time(before=timedelta(days=10)): + project = create_project( + notes_kwargs=[{'text': '![](/images/name/image.png)\n[](/files/name/file.pdf)'}], + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + self.run_cleanup_project_files(num_queries=1 + 4) + assert project.images.count() == 1 + assert project.files.count() == 1 + + def test_referenced_files_in_user_notes_not_removed(self): + with mock_time(before=timedelta(days=10)): + user = create_user( + notes_kwargs=[{'text': '![](/images/name/image.png)'}], + images_kwargs=[{'name': 'image.png'}], + ) + self.run_cleanup_user_files(num_queries=1 + 2) + assert user.images.count() == 1 + + def test_file_referenced_by_multiple_projects(self): + with mock_time(before=timedelta(days=10)): + project_unreferenced = create_project( + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + project_referenced = project_unreferenced.copy() + project_referenced.update_data({'field_markdown': '![](/images/name/image.png)\n[](/files/name/file.pdf)'}) + project_referenced.save() + self.run_cleanup_project_files(num_queries=1 + 4 + 2 
* 2 + 2 * 1) + + # Files deleted for unreferenced project + assert project_unreferenced.images.count() == 0 + assert project_unreferenced.files.count() == 0 + # Files not deleted for referenced project + assert project_referenced.images.count() == 1 + assert project_referenced.files.count() == 1 + # Files still present on filesystem + assert self.file_exists(project_referenced.images.first()) + assert self.file_exists(project_referenced.files.first()) + + def test_optimized_cleanup(self): + with mock_time(before=timedelta(days=20)): + project_old = create_project( + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + user_old = create_user( + images_kwargs=[{'name': 'image.png'}], + ) + project_new = create_project( + images_kwargs=[{'name': 'image.png'}], + files_kwargs=[{'name': 'file.pdf'}] + ) + user_new = create_user( + images_kwargs=[{'name': 'image.png'}], + ) + with mock_time(before=timedelta(days=10)): + project_new.save() + user_new.notes.first().save() + last_task_run = timezone.now() - timedelta(days=15) + self.run_cleanup_project_files(num_queries=1 + 4 + 2 * 2 + 2 * 1, last_success=last_task_run) + self.run_cleanup_user_files(num_queries=1 + 2 + 2 * 1 + 1 * 1, last_success=last_task_run) + + # Old project should be ignored because it was already cleaned in the last run + assert project_old.images.count() == 1 + assert project_old.files.count() == 1 + assert user_old.images.count() == 1 + # New project should be cleaned because it was modified after the last run + assert project_new.images.count() == 0 + assert project_new.files.count() == 0 + assert user_new.images.count() == 0 + + +@pytest.mark.django_db +class TestResetStaleArchiveRestore: + def test_reset_stale(self): + with mock_time(before=timedelta(days=10)): + archive = create_archived_project(project=create_project(members=[create_user(public_key=True) for _ in range(2)])) + keypart = archive.key_parts.first() + keypart.decrypted_at = timezone.now() + keypart.key_part = {'key_id': 'shamir-key-id', 'key': 'dummy-key'} + keypart.save() + + reset_stale_archive_restores(None) + + keypart.refresh_from_db() + assert not keypart.is_decrypted + assert keypart.decrypted_at is None + assert keypart.key_part is None + + def test_reset_not_stale(self): + with mock_time(before=timedelta(days=10)): + archive = create_archived_project(project=create_project(members=[create_user(public_key=True) for _ in range(3)])) + keypart1 = archive.key_parts.first() + keypart1.decrypted_at = timezone.now() + keypart1.key_part = {'key_id': 'shamir-key-id', 'key': 'dummy-key'} + keypart1.save() + + keypart2 = archive.key_parts.exclude(pk=keypart1.pk).first() + keypart2.decrypted_at = timezone.now() + keypart2.key_part = {'key_id': 'shamir-key-id-2', 'key': 'dummy-key2'} + keypart2.save() + + reset_stale_archive_restores(None) + + keypart1.refresh_from_db() + assert keypart1.is_decrypted + assert keypart1.decrypted_at is not None + assert keypart1.key_part is not None + keypart2.refresh_from_db() + assert keypart2.is_decrypted + assert keypart2.decrypted_at is not None + assert keypart2.key_part is not None + + def test_reset_one_but_not_other(self): + with mock_time(before=timedelta(days=10)): + keypart1 = create_archived_project(project=create_project(members=[create_user(public_key=True) for _ in range(2)])).key_parts.first() + keypart1.decrypted_at = timezone.now() + keypart1.key_part = {'key_id': 'shamir-key-id', 'key': 'dummy-key'} + keypart1.save() + + keypart2 = 
create_archived_project(project=create_project(members=[create_user(public_key=True) for _ in range(2)])).key_parts.first() + keypart2.decrypted_at = timezone.now() + keypart2.key_part = {'key_id': 'shamir-key-id', 'key': 'dummy-key'} + keypart2.save() + + reset_stale_archive_restores(None) + + keypart1.refresh_from_db() + assert not keypart1.is_decrypted + keypart2.refresh_from_db() + assert keypart2.is_decrypted + diff --git a/api/src/reportcreator_api/tests/test_rendering.py b/api/src/reportcreator_api/tests/test_rendering.py new file mode 100644 index 0000000..0bc6e12 --- /dev/null +++ b/api/src/reportcreator_api/tests/test_rendering.py @@ -0,0 +1,156 @@ +import pytest +import io +import re +import pikepdf +from asgiref.sync import async_to_sync +from unittest import mock +from pytest_django.asserts import assertHTMLEqual +from django.test import override_settings + +from reportcreator_api.tests.mock import create_project_type, create_project, create_user, create_finding +from reportcreator_api.tasks.rendering.entry import render_pdf, PdfRenderingError +from reportcreator_api.tasks.rendering.render import render_to_html +from reportcreator_api.utils.utils import merge + + +@pytest.mark.django_db +class TestHtmlRendering: + @pytest.fixture(autouse=True) + def setUp(self): + self.user = create_user() + self.project_type = create_project_type() + self.project = create_project(project_type=self.project_type, members=[self.user], findings_kwargs=[]) + self.finding = create_finding(project=self.project) + + with override_settings(CELERY_TASK_ALWAYS_EAGER=True): + yield + + def render_html(self, template, additional_data={}): + def render_only_html(data, language, **kwargs): + html, msgs = render_to_html(template=template, data=merge(data, additional_data), language=language) + return html.encode() if html else None, msgs + + with mock.patch('reportcreator_api.tasks.rendering.render.render_pdf', render_only_html): + html = async_to_sync(render_pdf)(self.project).decode() + return self.extract_html_part(html) + + def extract_html_part(self, html, start=None, end=None): + if not start and not end: + body_start = html.index('
<body') + content_start = html.index('><div>', body_start + 1) + 6 + return html[content_start:html.index('</div></body>
')] + else: + return html[html.index(start):html.index(end) + len(end)] + + @pytest.mark.parametrize('template,html', [ + ('{{ report.field_string }}', lambda self: self.project.data['field_string']), + ('{{ report.field_int }}', lambda self: str(self.project.data['field_int'])), + ('{{ report.field_enum.value }}', lambda self: self.project.data['field_enum']), + ('{{ findings[0].cvss.vector }}', lambda self: self.finding.data['cvss']), + ('{{ findings[0].cvss.score }}', lambda self: str(self.finding.risk_score)), + ('{{ data.pentesters[0].name }}', lambda self: self.user.name), + ('{{ data.pentesters[0].email }}', lambda self: self.user.email), + ('', lambda self: ''.join(self.project.members.all()[0].roles)), + ('', lambda self: self.finding.title), + ('{{ capitalize("hello there") }}', "Hello there"), + ("{{ formatDate('2022-09-21', 'iso') }}", "2022-09-21"), + ("{{ formatDate('2022-09-21', 'short', 'de-DE') }}", "21.09.22"), + ("{{ formatDate('2022-09-21', 'medium', 'de-DE') }}", "21.09.2022"), + ("{{ formatDate('2022-09-21', 'long', 'de-DE') }}", "21. September 2022"), + ("{{ formatDate('2022-09-21', 'full', 'de-DE') }}", "Mittwoch, 21. September 2022"), + ("{{ formatDate('2022-09-21', 'short', 'en-US') }}", "9/21/22"), + ("{{ formatDate('2022-09-21', 'medium', 'en-US') }}", "Sep 21, 2022"), + ("{{ formatDate('2022-09-21', 'long', 'en-US') }}", "September 21, 2022"), + ("{{ formatDate('2022-09-21', 'full', 'en-US') }}", "Wednesday, September 21, 2022"), + ("{{ formatDate('2022-09-21', {year: '2-digit', month: 'narrow', day: '2-digit', numberingSystem: 'latn'}, 'en-US') }}", "S 21, 22"), + ]) + def test_variables_rendering(self, template, html): + if callable(html): + html = html(self) + actual_html = self.render_html(template) + assert actual_html == html + + @pytest.mark.parametrize('template', [ + '

', + '{{ report.nonexistent_variable.prop }}' + ]) + def test_template_error(self, template): + with pytest.raises(PdfRenderingError): + self.project_type.report_template = template + async_to_sync(render_pdf)(project=self.project) + + def test_markdown_rendering(self): + assertHTMLEqual( + self.render_html('', {'md': 'text _with_ **markdown** `code`'}), + 'text with markdown code' + ) + + def test_toc_rendering(self): + html = self.render_html(""" + Table of Contents + H1 + H1.1 + H1.1.1 + H1.2 + H1.3 + H2 + H2.1 + Appendix + A.1 + """) + assertHTMLEqual(self.extract_html_part(html, '    ', '
'), """ + + """) + + def test_chart_rendering(self): + html = self.render_html(""" + """) + assert re.fullmatch(r'^\s*\s*$', html) + + @pytest.mark.parametrize('password,encrypted', [ + ('password', True), + ('', False) + ]) + def test_pdf_encryption(self, password, encrypted): + pdf_data = async_to_sync(render_pdf)(project=self.project, password=password) + with pikepdf.Pdf.open(io.BytesIO(pdf_data), password=password) as pdf: + assert pdf.is_encrypted == encrypted + diff --git a/api/src/reportcreator_api/tests/utils.py b/api/src/reportcreator_api/tests/utils.py new file mode 100644 index 0000000..8924f2c --- /dev/null +++ b/api/src/reportcreator_api/tests/utils.py @@ -0,0 +1,9 @@ +from reportcreator_api.utils.utils import get_key_or_attr + + +def assertKeysEqual(a, b, keys): + for k in keys: + va = get_key_or_attr(a, k) + vb = get_key_or_attr(b, k) + assert va == vb, f'Key "{k}" is not equal' + diff --git a/api/src/reportcreator_api/users/__init__.py b/api/src/reportcreator_api/users/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/users/admin.py b/api/src/reportcreator_api/users/admin.py new file mode 100644 index 0000000..969b8d3 --- /dev/null +++ b/api/src/reportcreator_api/users/admin.py @@ -0,0 +1,46 @@ +from django.contrib import admin +from django.contrib.auth.admin import UserAdmin as BaseUserAdmin +from django.utils.translation import gettext_lazy as _ + +from reportcreator_api.utils.admin import BaseAdmin, admin_change_url +from reportcreator_api.users.models import PentestUser, MFAMethod, AuthIdentity + + +@admin.register(PentestUser) +class PentestUserAdmin(BaseUserAdmin): + fieldsets = ( + (None, {"fields": ("username", "password")}), + (_("Personal info"), {"fields": ( "email", "phone", "mobile", "title_before", "first_name", "middle_name", "last_name", "title_after")}), + ( + _("Permissions"), + { + "fields": ( + "is_active", + "is_staff", + "is_superuser", + "is_system_user", + "is_user_manager", + "is_designer", + "is_template_editor", + "is_guest", + "is_global_archiver", + "groups", + "user_permissions", + ), + }, + ), + (_("Important dates"), {"fields": ("last_login", "date_joined")}), + ) + + +@admin.register(MFAMethod) +class MFAMethodAdmin(BaseAdmin): + def link_user(self, obj): + return admin_change_url(obj.user.name, 'users', 'pentestuser', obj.user.id) + + +@admin.register(AuthIdentity) +class AuthIdentityAdmin(BaseAdmin): + def link_user(self, obj): + return admin_change_url(obj.user.name, 'users', 'pentestuser', obj.user.id) + diff --git a/api/src/reportcreator_api/users/apps.py b/api/src/reportcreator_api/users/apps.py new file mode 100644 index 0000000..706e1eb --- /dev/null +++ b/api/src/reportcreator_api/users/apps.py @@ -0,0 +1,10 @@ +from django.apps import AppConfig + + +class UsersConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'reportcreator_api.users' + + def ready(self) -> None: + from . 
import signals # noqa + diff --git a/api/src/reportcreator_api/users/backends/__init__.py b/api/src/reportcreator_api/users/backends/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/users/backends/session.py b/api/src/reportcreator_api/users/backends/session.py new file mode 100644 index 0000000..de69e97 --- /dev/null +++ b/api/src/reportcreator_api/users/backends/session.py @@ -0,0 +1,9 @@ +from django.contrib.sessions.backends.db import SessionStore as BaseDbSessionStore +from reportcreator_api.users.models import Session + + +class SessionStore(BaseDbSessionStore): + @classmethod + def get_model_class(cls): + return Session + diff --git a/api/src/reportcreator_api/users/management/__init__.py b/api/src/reportcreator_api/users/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/users/management/commands/__init__.py b/api/src/reportcreator_api/users/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/users/management/commands/createorupdateuser.py b/api/src/reportcreator_api/users/management/commands/createorupdateuser.py new file mode 100644 index 0000000..d8977b4 --- /dev/null +++ b/api/src/reportcreator_api/users/management/commands/createorupdateuser.py @@ -0,0 +1,46 @@ +import os + +from django.core.management import CommandError, BaseCommand +from reportcreator_api.users.models import PentestUser + + +class Command(BaseCommand): + help = 'Create a superuser, or update the password for an existing superuser.' + + def add_arguments(self, parser): + super().add_arguments(parser) + parser.add_argument( + '--username', dest='username', default=None, + ) + parser.add_argument( + '--password', dest='password', default=None, + help='Specifies the password for the user.', + ) + parser.add_argument( + '--superuser', dest='is_superuser', action='store_true', default=False + ) + parser.add_argument( + '--system', dest='is_system_user', action='store_true', default=False + ) + + def handle(self, username, password, is_superuser, is_system_user, *args, **kwargs): + password = password or os.environ.get('DJANGO_SUPERUSER_PASSWORD') + + if not password or not username: + raise CommandError("username and password (DJANGO_SUPERUSER_PASSWORD) must be set") + if len(password) < 15: + raise CommandError("password must be at least 15 characters") + + user = PentestUser.objects.filter(username=username).first() + if not user: + user = PentestUser(username=username) + + user.set_password(password) + if is_superuser: + user.is_superuser = True + user.is_staff = True + if is_system_user: + user.is_system_user = True + user.save() + + self.stdout.write("User created or updated") diff --git a/api/src/reportcreator_api/users/migrations/0001_initial.py b/api/src/reportcreator_api/users/migrations/0001_initial.py new file mode 100644 index 0000000..fb2be86 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0001_initial.py @@ -0,0 +1,50 @@ +# Generated by Django 4.0.4 on 2022-07-08 11:08 + +import django.contrib.auth.models +import django.contrib.auth.validators +from django.db import migrations, models +import django.utils.timezone +import phonenumber_field.modelfields +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0012_alter_user_first_name_max_length'), + ] + + operations = [ + migrations.CreateModel( + name='PentestUser', + fields=[ + 
('password', models.CharField(max_length=128, verbose_name='password')), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ('email', models.EmailField(blank=True, max_length=254, null=True, verbose_name='Email address')), + ('phone', phonenumber_field.modelfields.PhoneNumberField(blank=True, max_length=128, null=True, region=None, verbose_name='Phone number')), + ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.group', verbose_name='groups')), + ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.permission', verbose_name='user permissions')), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(models.Model, reportcreator_api.utils.models.ModelDiffMixin), + managers=[ + ('objects', django.contrib.auth.models.UserManager()), + ], + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0002_pentestuser_middle_name_pentestuser_title_after_and_more.py b/api/src/reportcreator_api/users/migrations/0002_pentestuser_middle_name_pentestuser_title_after_and_more.py new file mode 100644 index 0000000..99d387a --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0002_pentestuser_middle_name_pentestuser_title_after_and_more.py @@ -0,0 +1,38 @@ +# Generated by Django 4.0.4 on 2022-07-25 19:38 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0001_initial'), + ] + + operations = [ + migrations.AddField( + model_name='pentestuser', + name='middle_name', + field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Middle name'), + ), + migrations.AddField( + model_name='pentestuser', + name='title_after', + field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Title (after)'), + ), + migrations.AddField( + model_name='pentestuser', + name='title_before', + field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Title (before)'), + ), + migrations.AddField( + model_name='pentestuser', + name='mobile', + field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone number (mobile)'), + ), + migrations.AlterField( + model_name='pentestuser', + name='phone', + field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone number'), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0003_pentestuser_is_designer_and_more.py b/api/src/reportcreator_api/users/migrations/0003_pentestuser_is_designer_and_more.py new file mode 100644 index 0000000..de723d3 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0003_pentestuser_is_designer_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 4.0.7 on 2022-08-24 13:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0002_pentestuser_middle_name_pentestuser_title_after_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='pentestuser', + name='is_designer', + field=models.BooleanField(db_index=True, default=False), + ), + migrations.AddField( + model_name='pentestuser', + name='is_template_editor', + field=models.BooleanField(db_index=True, default=False), + ), + migrations.AddField( + model_name='pentestuser', + name='is_user_manager', + field=models.BooleanField(db_index=True, default=False), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0004_alter_pentestuser_created.py b/api/src/reportcreator_api/users/migrations/0004_alter_pentestuser_created.py new file mode 100644 index 0000000..8f80bb7 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0004_alter_pentestuser_created.py @@ -0,0 +1,19 @@ +# Generated by Django 4.0.7 on 2022-10-13 08:10 + +from django.db import migrations, models 
+import reportcreator_api.utils.models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0003_pentestuser_is_designer_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='pentestuser', + name='created', + field=models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0005_guest_users.py b/api/src/reportcreator_api/users/migrations/0005_guest_users.py new file mode 100644 index 0000000..bdcaccf --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0005_guest_users.py @@ -0,0 +1,25 @@ +# Generated by Django 4.1.3 on 2022-12-06 14:33 + +from django.db import migrations, models +import reportcreator_api.users.querysets + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0004_alter_pentestuser_created'), + ] + + operations = [ + migrations.AlterModelManagers( + name='pentestuser', + managers=[ + ('objects', reportcreator_api.users.querysets.PentestUserManager()), + ], + ), + migrations.AddField( + model_name='pentestuser', + name='is_guest', + field=models.BooleanField(db_index=True, default=False), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0006_db_encryption.py b/api/src/reportcreator_api/users/migrations/0006_db_encryption.py new file mode 100644 index 0000000..0615cf5 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0006_db_encryption.py @@ -0,0 +1,71 @@ +# Generated by Django 4.1.2 on 2022-10-27 17:26 + +from django.db import migrations, models +import reportcreator_api.archive.crypto.fields +import reportcreator_api.users.querysets + + +def migrate_to_encryption(apps, schema_editor): + PentestUser = apps.get_model('users', 'PentestUser') + + users = list(PentestUser.objects.all()) + for u in users: + u.password_new = u.password + PentestUser.objects.bulk_update(users, ['password_new']) + + +def reverse_migrate_from_encryption(apps, schema_editor): + PentestUser = apps.get_model('users', 'PentestUser') + + users = list(PentestUser.objects.all()) + for u in users: + u.password = u.password_new + PentestUser.objects.bulk_update(users, ['password']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0005_guest_users'), + ] + + operations = [ + migrations.CreateModel( + name='Session', + fields=[ + ('expire_date', models.DateTimeField(db_index=True, verbose_name='expire date')), + ('session_key', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=40, verbose_name='session key'), editable=True)), + ('session_data', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.TextField(verbose_name='session data'), editable=True)), + ('session_key_hash', models.BinaryField(max_length=32, primary_key=True, serialize=False)), + ], + options={ + 'verbose_name': 'session', + 'verbose_name_plural': 'sessions', + 'abstract': False, + }, + managers=[ + ('objects', reportcreator_api.users.querysets.SessionManager()), + ], + ), + migrations.AddField( + model_name='pentestuser', + name='password_new', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=128, default='', verbose_name='password'), editable=True), + preserve_default=False, + ), + migrations.RunPython(code=migrate_to_encryption, reverse_code=reverse_migrate_from_encryption), + migrations.RemoveField( + model_name='pentestuser', + name='password', + ), + migrations.RenameField( + model_name='pentestuser', + 
old_name='password_new', + new_name='password', + ), + migrations.AlterField( + model_name='pentestuser', + name='password', + field=reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.CharField(max_length=128, verbose_name='password'), editable=True), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0007_mfamethod.py b/api/src/reportcreator_api/users/migrations/0007_mfamethod.py new file mode 100644 index 0000000..a4f60c5 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0007_mfamethod.py @@ -0,0 +1,37 @@ +# Generated by Django 4.1.3 on 2022-12-17 12:08 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.archive.crypto.fields +import reportcreator_api.users.querysets +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0006_db_encryption'), + ] + + operations = [ + migrations.CreateModel( + name='MFAMethod', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', models.DateTimeField(auto_now=True)), + ('method_type', models.CharField(choices=[('totp', 'TOTP'), ('fido2', 'FIDO2'), ('backup', 'Backup codes')], max_length=255)), + ('is_primary', models.BooleanField(default=False)), + ('name', models.CharField(blank=True, default='', max_length=255)), + ('data', reportcreator_api.archive.crypto.fields.EncryptedField(base_field=models.JSONField(), editable=True)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='mfa_methods', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0008_pentestuser_is_system_user.py b/api/src/reportcreator_api/users/migrations/0008_pentestuser_is_system_user.py new file mode 100644 index 0000000..d039f7a --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0008_pentestuser_is_system_user.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.3 on 2023-01-02 08:24 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0007_mfamethod'), + ] + + operations = [ + migrations.AddField( + model_name='pentestuser', + name='is_system_user', + field=models.BooleanField(db_index=True, default=False), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0009_authidentity.py b/api/src/reportcreator_api/users/migrations/0009_authidentity.py new file mode 100644 index 0000000..2f23845 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0009_authidentity.py @@ -0,0 +1,34 @@ +# Generated by Django 4.1.3 on 2023-01-04 08:52 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion +import reportcreator_api.utils.models +import uuid + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0008_pentestuser_is_system_user'), + ] + + operations = [ + migrations.CreateModel( + name='AuthIdentity', + fields=[ + ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ('created', models.DateTimeField(default=reportcreator_api.utils.models.now, editable=False)), + ('updated', 
models.DateTimeField(auto_now=True)), + ('provider', models.CharField(max_length=255)), + ('identifier', models.CharField(max_length=255)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='auth_identities', to=settings.AUTH_USER_MODEL)), + ], + options={ + 'ordering': ['-created'], + 'abstract': False, + 'unique_together': {('provider', 'identifier')}, + }, + bases=(reportcreator_api.utils.models.ModelDiffMixin, models.Model), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/0010_pentestuser_is_global_archiver.py b/api/src/reportcreator_api/users/migrations/0010_pentestuser_is_global_archiver.py new file mode 100644 index 0000000..43ece79 --- /dev/null +++ b/api/src/reportcreator_api/users/migrations/0010_pentestuser_is_global_archiver.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2b1 on 2023-03-23 08:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0009_authidentity'), + ] + + operations = [ + migrations.AddField( + model_name='pentestuser', + name='is_global_archiver', + field=models.BooleanField(db_index=True, default=False), + ), + ] diff --git a/api/src/reportcreator_api/users/migrations/__init__.py b/api/src/reportcreator_api/users/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/src/reportcreator_api/users/models.py b/api/src/reportcreator_api/users/models.py new file mode 100644 index 0000000..6f9360f --- /dev/null +++ b/api/src/reportcreator_api/users/models.py @@ -0,0 +1,162 @@ +import functools +import hmac +import pyotp +import qrcode +import qrcode.image.pil +from urllib.parse import urlparse +from io import BytesIO +from base64 import b64encode +from fido2.server import Fido2Server, _verify_origin_for_rp +from fido2.webauthn import PublicKeyCredentialRpEntity +from django.conf import settings +from django.db import models +from django.contrib.auth.models import AbstractUser +from django.contrib.sessions.base_session import AbstractBaseSession +from django.utils.translation import gettext_lazy as _ + +from reportcreator_api.utils import license +from reportcreator_api.archive.crypto.fields import EncryptedField +from reportcreator_api.utils.models import BaseModel +from reportcreator_api.users import querysets + + +class PentestUser(BaseModel, AbstractUser): + password = EncryptedField(base_field=models.CharField(_("password"), max_length=128)) + + middle_name = models.CharField(_('Middle name'), max_length=255, null=True, blank=True) + title_before = models.CharField(_('Title (before)'), max_length=255, null=True, blank=True) + title_after = models.CharField(_('Title (after)'), max_length=255, null=True, blank=True) + + email = models.EmailField(_("Email address"), null=True, blank=True) + phone = models.CharField(_('Phone number'), max_length=255, null=True, blank=True) + mobile = models.CharField(_('Phone number (mobile)'), max_length=255, null=True, blank=True) + + is_designer = models.BooleanField(default=False, db_index=True) + is_template_editor = models.BooleanField(default=False, db_index=True) + is_user_manager = models.BooleanField(default=False, db_index=True) + is_guest = models.BooleanField(default=False, db_index=True) + is_system_user = models.BooleanField(default=False, db_index=True) + is_global_archiver = models.BooleanField(default=False, db_index=True) + + REQUIRED_FIELDS = [] + + objects = querysets.PentestUserManager() + + @property + def name(self): + return ((self.title_before + ' ') if 
self.title_before else '') + \ + ((self.first_name + ' ') if self.first_name else '') + \ + ((self.middle_name + ' ') if self.middle_name else '') + \ + (self.last_name or '') + \ + ((', ' + self.title_after) if self.title_after else '') + + @property + def scope(self): + return (['admin'] if self.is_admin else []) + \ + (['template_editor'] if self.is_template_editor or self.is_admin else []) + \ + (['designer'] if self.is_designer or self.is_admin else []) + \ + (['user_manager'] if self.is_user_manager or self.is_admin else []) + \ + (['guest'] if self.is_guest and not self.is_admin else []) + \ + (['system'] if self.is_system_user else []) + + @property + def can_login_local(self): + return self.password and self.has_usable_password() + + @functools.cached_property + def can_login_oidc(self): + return bool(self.auth_identities.all()) + + @property + def is_admin(self): + return self.is_active and self.is_superuser and \ + getattr(self, 'admin_permissions_enabled', False) if license.is_professional() else True + + +class AuthIdentity(BaseModel): + user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, related_name='auth_identities') + provider = models.CharField(max_length=255) + identifier = models.CharField(max_length=255) + + class Meta(BaseModel.Meta): + unique_together = ['provider', 'identifier'] + + +class Session(AbstractBaseSession): + session_key = EncryptedField(base_field=models.CharField(_("session key"), max_length=40)) + session_data = EncryptedField(base_field=models.TextField(_("session data"))) + + session_key_hash = models.BinaryField(max_length=32, primary_key=True) + + objects = querysets.SessionManager() + + def save(self, *args, **kwargs) -> None: + self.session_key_hash = self.hash_session_key(self.session_key) + return super().save(*args, **kwargs) + + @classmethod + def get_session_store_class(cls): + from reportcreator_api.users.backends.session import SessionStore + return SessionStore + + @classmethod + def hash_session_key(cls, session_key) -> bytes: + return hmac.new(key=settings.SECRET_KEY.encode(), msg=session_key.encode(), digestmod='sha3_256').digest() + + +class MFAMethodType(models.TextChoices): + TOTP = 'totp', _('TOTP') + FIDO2 = 'fido2', _('FIDO2') + BACKUP = 'backup', _('Backup codes') + + +class MFAMethod(BaseModel): + user = models.ForeignKey(to=PentestUser, on_delete=models.CASCADE, related_name='mfa_methods') + method_type = models.CharField(max_length=255, choices=MFAMethodType.choices) + is_primary = models.BooleanField(default=False) + name = models.CharField(max_length=255, default="", blank=True) + data = EncryptedField(base_field=models.JSONField()) + + objects = querysets.MFAMethodManager() + + def get_totp_qrcode(self): + if self.method_type != MFAMethodType.TOTP: + return None + + totp = pyotp.TOTP(name=self.user.username, issuer=settings.MFA_SERVER_NAME, **self.data) + img = qrcode.make(totp.provisioning_uri(), image_factory=qrcode.image.pil.PilImage) + buf = BytesIO() + img.save(buf, format='PNG') + img.close() + return 'data:image/png;base64,' + b64encode(buf.getvalue()).decode() + + def verify_code(self, code): + if self.method_type == MFAMethodType.BACKUP: + if code in self.data.get('backup_codes', []): + self.data['backup_codes'].remove(code) + self.save() + return True + return False + elif self.method_type == MFAMethodType.TOTP: + totp = pyotp.TOTP(**self.data) + return totp.verify(code, valid_window=1) + return False + + @classmethod + def get_fido2_server(cls): + rp_id = settings.MFA_FIDO2_RP_ID + + def 
verify_origin(origin): + if not settings.MFA_FIDO2_RP_ID: + raise ValueError('The setting MFA_FIDO2_RP_ID is not configured. Set it to your hostname that you use to access your installation e.g. "sysreptor.example.com"') + + # Do not require HTTPS for localhost + url = urlparse(origin) + if rp_id == 'localhost': + return url.hostname == rp_id + return _verify_origin_for_rp(rp_id)(origin) + + return Fido2Server( + rp=PublicKeyCredentialRpEntity(id=rp_id, name=settings.MFA_SERVER_NAME), + verify_origin=verify_origin, + ) diff --git a/api/src/reportcreator_api/users/permissions.py b/api/src/reportcreator_api/users/permissions.py new file mode 100644 index 0000000..429e70a --- /dev/null +++ b/api/src/reportcreator_api/users/permissions.py @@ -0,0 +1,90 @@ +from datetime import datetime +from django.conf import settings +from django.utils import timezone +from rest_framework import permissions, authentication, exceptions + +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils import license + + +def check_sensitive_operation_timeout(request): + """ + Check if the current session was fully authenticated (password + MFA) within a short period of time (settings.SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT). + """ + try: + reauth_time = datetime.fromisoformat(request.session.get('authentication_info', {}).get('reauth_time')) + if reauth_time + settings.SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT >= timezone.now(): + return True + except (ValueError, TypeError): + pass + raise exceptions.PermissionDenied(detail='Authentication timeout for sensitive operation. Log in again.', code='reauth-required') + + +class UserViewSetPermissions(permissions.BasePermission): + def has_permission(self, request, view): + if request.method in permissions.SAFE_METHODS: + return True + if view.action == 'destroy': + return request.user.is_admin or request.user.is_user_manager + elif view.action == 'self': + # Allow updating your own user + return True + elif view.action == 'change_password': + return check_sensitive_operation_timeout(request) + elif view.action == 'enable_admin_permissions': + return license.ProfessionalLicenseRequired().has_permission(request, view) and request.user.is_superuser and check_sensitive_operation_timeout(request) + elif view.action == 'disable_admin_permissions': + return license.ProfessionalLicenseRequired().has_permission(request, view) and request.user.is_admin + return request.user.is_user_manager or request.user.is_admin + + def has_object_permission(self, request, view, obj): + if request.method in permissions.SAFE_METHODS: + return True + if obj.is_system_user and obj != request.user: + return False + if view.action in ['reset_password', 'destroy']: + if obj.is_superuser and not request.user.is_admin: + # Prevent user_managers from resetting superuser password + # This would be a privilege escalation + return False + if view.action == 'destroy' and request.user == obj: + # Prevent deleting yourself + return False + return True + + +class MFAMethodViewSetPermissons(permissions.BasePermission): + def has_permission(self, request, view): + user = view.get_user() + + if user == request.user: + check_sensitive_operation_timeout(request) + return True + + if not request.user.is_admin and not request.user.is_user_manager: + return False + if request.method in permissions.SAFE_METHODS: + return True + if view.action not in ['list', 'retrieve', 'destroy']: + return False + if request.user.is_user_manager and user.is_superuser: + return False + return True + + 
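# --- Illustrative sketch (not part of the diff): the re-authentication window that
# check_sensitive_operation_timeout() above enforces for sensitive operations such as
# password changes, enabling admin permissions, and managing your own MFA methods.
# The 10-minute timeout is an assumed placeholder for
# settings.SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT; the session layout mirrors the
# 'authentication_info' dict written by the login views.
from datetime import datetime, timedelta, timezone

SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT = timedelta(minutes=10)  # assumed value

def is_reauth_fresh(session: dict) -> bool:
    # A full password + MFA login stores an ISO-8601 'reauth_time' in the session.
    try:
        reauth_time = datetime.fromisoformat(session.get('authentication_info', {}).get('reauth_time'))
    except (TypeError, ValueError):
        return False  # never re-authenticated, or malformed timestamp
    # Still fresh while the stored timestamp plus the timeout lies in the future.
    return reauth_time + SENSITIVE_OPERATION_REAUTHENTICATION_TIMEOUT >= datetime.now(timezone.utc)

# A session that re-authenticated just now passes; an empty session does not.
assert is_reauth_fresh({'authentication_info': {'reauth_time': datetime.now(timezone.utc).isoformat()}})
assert not is_reauth_fresh({})
# --- end of sketch ---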
+class AuthIdentityViewSetPermissions(permissions.BasePermission): + def has_permission(self, request, view): + user = view.get_user() + if request.method in permissions.SAFE_METHODS: + return request.user.is_admin or request.user.is_user_manager or user == request.user + else: + if user.is_system_user: + return False + return request.user.is_admin or (request.user.is_user_manager and not user.is_superuser) + + +class MFALoginInProgressAuthentication(authentication.BaseAuthentication): + def authenticate(self, request): + if user_id := request.session.get('login_state', {}).get('user_id'): + return PentestUser.objects.get(id=user_id), None + diff --git a/api/src/reportcreator_api/users/querysets.py b/api/src/reportcreator_api/users/querysets.py new file mode 100644 index 0000000..54cf1f2 --- /dev/null +++ b/api/src/reportcreator_api/users/querysets.py @@ -0,0 +1,172 @@ +import pyotp +from fido2.server import Fido2Server, AttestedCredentialData +from fido2.webauthn import PublicKeyCredentialRpEntity, PublicKeyCredentialUserEntity, UserVerificationRequirement, AuthenticatorAttachment, \ + AttestationObject, CollectedClientData +from fido2.utils import websafe_encode, websafe_decode +from django.conf import settings +from django.db import models +from django.contrib.sessions.base_session import BaseSessionManager +from django.contrib.auth.models import UserManager +from django.utils.crypto import get_random_string + + +class SessionQueryset(models.QuerySet): + def filter(self, **kwargs): + from reportcreator_api.users.models import Session + if 'session_key' in kwargs: + kwargs['session_key_hash'] = Session.hash_session_key(kwargs['session_key']) + del kwargs['session_key'] + return super().filter(**kwargs) + + +class SessionManager(BaseSessionManager, models.Manager.from_queryset(SessionQueryset)): + use_in_migrations = True + + def save(self, session_key, session_dict, expire_date): + from reportcreator_api.users.models import Session + + s = Session( + session_key=session_key, + session_data=self.encode(session_dict), + expire_date=expire_date + ) + if session_dict: + s.save() + else: + s.delete() # Clear sessions with no data. 
+ return s + + +class PentestUserQuerySet(models.QuerySet): + def only_active(self): + return self.filter(is_active=True) + + def only_permitted(self, user): + from reportcreator_api.users.models import PentestUser + if user.is_guest: + # Only show users that are members in projects where the guest user is also a member + return self \ + .filter( + models.Q(pk=user.pk) | + models.Q(pk__in=PentestUser.objects.filter(projectmemberinfo__project__members__user=user))) + else: + return self + + def annotate_mfa_enabled(self): + from reportcreator_api.users.models import MFAMethod + return self \ + .annotate(is_mfa_enabled=models.Exists(MFAMethod.objects.filter(user=models.OuterRef('pk')))) + + def annotate_has_public_keys(self): + from reportcreator_api.pentests.models import UserPublicKey + return self \ + .annotate(has_public_keys=models.Exists(UserPublicKey.objects.only_enabled().filter(user=models.OuterRef('pk')))) + + def only_with_public_keys(self): + return self \ + .annotate_has_public_keys() \ + .filter(has_public_keys=True) + + def get_licensed_user_count(self): + return self \ + .only_active() \ + .exclude(is_system_user=True) \ + .count() + +class PentestUserManager(UserManager, models.Manager.from_queryset(PentestUserQuerySet)): + pass + + +class MFAMethodQuerySet(models.QuerySet): + def only_permitted(self, user): + if user.is_admin or user.is_user_manager: + return self + return self.filter(user=user) + + def default_order(self): + from reportcreator_api.users.models import MFAMethodType + return self \ + .annotate(method_type_order=models.Case( + models.When(models.Q(method_type=MFAMethodType.FIDO2), then=1), + models.When(models.Q(method_type=MFAMethodType.TOTP), then=2), + models.When(models.Q(method_type=MFAMethodType.BACKUP), then=3), + default=4 + )) \ + .order_by('-is_primary', 'method_type_order', 'created') + + +class MFAMethodManager(models.Manager.from_queryset(MFAMethodQuerySet)): + def create_backup(self, save=True, **kwargs): + from reportcreator_api.users.models import MFAMethod, MFAMethodType + kwargs |= { + 'method_type': MFAMethodType.BACKUP, + 'data': { + 'backup_codes': [ + get_random_string(length=12) for _ in range(10) + ] + } + } + out = MFAMethod(**kwargs) + if save: + out.save() + return out + + def create_totp(self, save=True, **kwargs): + from reportcreator_api.users.models import MFAMethod, MFAMethodType + totp = pyotp.TOTP(pyotp.random_base32()) + kwargs |= { + 'method_type': MFAMethodType.TOTP, + 'data': { + 's': totp.secret, + 'digits': totp.digits, + 'interval': totp.interval, + } + } + out = MFAMethod(**kwargs) + if save: + out.save() + return out + + def get_fido2_user_credentials(self, user): + from reportcreator_api.users.models import MFAMethodType + fido2_methods = self.filter(user=user) \ + .filter(method_type=MFAMethodType.FIDO2) + return [AttestedCredentialData(websafe_decode(m.data['device'])) for m in fido2_methods] + + def create_fido2_begin(self, user, **kwargs): + from reportcreator_api.users.models import MFAMethod, MFAMethodType + server = MFAMethod.get_fido2_server() + options, state = server.register_begin( + user=PublicKeyCredentialUserEntity( + id=str(user.id).encode(), + name=user.username, + display_name=user.username, + ), + credentials=self.get_fido2_user_credentials(user), + user_verification=UserVerificationRequirement.PREFERRED, + authenticator_attachment=AuthenticatorAttachment.CROSS_PLATFORM + ) + + kwargs |= { + 'method_type': MFAMethodType.FIDO2, + 'data': { + 'options': dict(options), + 'state': state, + }, + } + 
return MFAMethod(**kwargs) + + def create_fido2_complete(self, instance, response, save=True): + from reportcreator_api.users.models import MFAMethod + server = MFAMethod.get_fido2_server() + auth_data = server.register_complete( + state=instance.data.get('state'), + response=response + ) + instance.data = { + 'device': websafe_encode(auth_data.credential_data) + } + if save: + instance.save() + return instance + diff --git a/api/src/reportcreator_api/users/serializers.py b/api/src/reportcreator_api/users/serializers.py new file mode 100644 index 0000000..a4805d6 --- /dev/null +++ b/api/src/reportcreator_api/users/serializers.py @@ -0,0 +1,220 @@ +from collections import OrderedDict +from uuid import UUID +from rest_framework import serializers +from django.contrib.auth.password_validation import validate_password +from django.contrib.auth.hashers import make_password +from django.db import transaction +from django.conf import settings +from reportcreator_api.users.models import PentestUser, MFAMethod, MFAMethodType, AuthIdentity +from reportcreator_api.utils.utils import omit_items + + +class PentestUserSerializer(serializers.ModelSerializer): + class Meta: + model = PentestUser + fields = ['id', 'username', 'name', 'title_before', 'first_name', 'middle_name', 'last_name', 'title_after', 'is_active'] + + +class PentestUserDetailSerializer(serializers.ModelSerializer): + is_mfa_enabled = serializers.SerializerMethodField() + + class Meta: + model = PentestUser + fields = [ + 'id', 'created', 'updated', 'last_login', 'is_active', + 'username', 'name', 'title_before', 'first_name', 'middle_name', 'last_name', 'title_after', + 'email', 'phone', 'mobile', + 'scope', 'is_superuser', 'is_designer', 'is_template_editor', 'is_user_manager', 'is_guest', 'is_system_user', 'is_global_archiver', + 'is_mfa_enabled', 'can_login_local', 'can_login_oidc', + ] + read_only_fields = ['is_system_user'] + + def get_is_mfa_enabled(self, obj): + if (is_mfa_enabled := getattr(obj, 'is_mfa_enabled', None)) is not None: + return is_mfa_enabled + return obj.mfa_methods.all().exists() + + def get_extra_kwargs(self): + user = self.context['request'].user + read_only = not (user.is_user_manager or user.is_admin) + return super().get_extra_kwargs() | { + 'is_superuser': {'read_only': not user.is_admin}, + 'is_user_manager': {'read_only': read_only}, + 'is_designer': {'read_only': read_only}, + 'is_template_editor': {'read_only': read_only}, + 'is_guest': {'read_only': read_only}, + 'is_global_archiver': {'read_only': read_only}, + 'username': {'read_only': read_only}, + } + + +class CreateUserSerializer(PentestUserDetailSerializer): + class Meta(PentestUserDetailSerializer.Meta): + fields = PentestUserDetailSerializer.Meta.fields + ['password'] + extra_kwargs = { + 'password': {'write_only': True, **({'required': False, 'allow_null': True, 'default': None} if settings.AUTHLIB_OAUTH_CLIENTS else {})} + } + + def validate_password(self, value): + if value is not None: + validate_password(value, user=self.instance) + return make_password(value) + + +class RelatedUserSerializer(serializers.PrimaryKeyRelatedField): + requires_context = True + + def __init__(self, user_serializer=PentestUserSerializer, **kwargs): + self.user_serializer=user_serializer + super().__init__(**kwargs) + + def get_queryset(self): + qs = PentestUser.objects.all() + if request := self.context.get('request'): + qs = qs.only_permitted(request.user) + return qs + + def use_pk_only_optimization(self): + return False + + def to_internal_value(self, data): 
+ if isinstance(data, dict) and 'id' in data: + return self.get_queryset().get(pk=data['id']) + elif isinstance(data, (str, UUID)): + return self.get_queryset().get(pk=data) + else: + return data + + def to_representation(self, value): + return self.user_serializer(value).to_representation(value) + + def get_choices(self, cutoff=None): + queryset = self.get_queryset() + if queryset is None: + # Ensure that field.choices returns something sensible + # even when accessed with a read-only field. + return {} + + if cutoff is not None: + queryset = queryset[:cutoff] + + return OrderedDict([(str(item.pk), self.display_value(item)) for item in queryset]) + + +class ResetPasswordSerializer(serializers.ModelSerializer): + password = serializers.CharField(write_only=True) + + class Meta: + model = PentestUser + fields = ['password'] + + def validate_password(self, value): + validate_password(value, user=self.instance) + return value + + def update(self, instance, validated_data): + instance.set_password(validated_data['password']) + instance.save() + return instance + + +class MFAMethodSerializer(serializers.ModelSerializer): + class Meta: + model = MFAMethod + fields = ['id', 'method_type', 'is_primary', 'name'] + read_only_fields = ['method_type'] + + @transaction.atomic() + def update(self, instance, validated_data): + if validated_data.get('is_primary', False): + self.instance.user.mfa_methods.update(is_primary=False) + + return super().update(instance, validated_data) + + +class LoginSerializer(serializers.Serializer): + username = serializers.CharField() + password = serializers.CharField(style={'input_type': 'password'}) + + def validate(self, attrs): + try: + user = PentestUser.objects.get(username=attrs['username']) + except PentestUser.DoesNotExist: + user = PentestUser() + + if not user.check_password(attrs['password']): + raise serializers.ValidationError('Invalid username or password') + + return user + + +class MFAMethodRelatedField(serializers.PrimaryKeyRelatedField): + def get_queryset(self): + return self.context['request'].user.mfa_methods.all() + + +class LoginMFACodeSerializer(serializers.Serializer): + id = MFAMethodRelatedField() + code = serializers.CharField() + + def validate(self, attrs): + mfa_method = attrs['id'] + if not mfa_method.verify_code(attrs['code']): + raise serializers.ValidationError('Invalid code') + return mfa_method + + +class MFAMethodRegisterSerializerBase(serializers.Serializer): + @property + def method_type(self): + return None + + def validate(self, attrs): + if self.instance.method_type != self.method_type: + raise serializers.ValidationError('Invalid MFA Method') + if self.instance.user != self.context['user']: + raise serializers.ValidationError('Invalid user') + return super().validate(attrs) + + def update(self, instance, validated_data): + instance.is_primary = self.method_type != MFAMethodType.BACKUP and not instance.user.mfa_methods.filter(is_primary=True).exists() + instance.save() + return instance + + +class MFAMethodRegisterBackupCodesSerializer(MFAMethodRegisterSerializerBase): + method_type = MFAMethodType.BACKUP + + +class MFAMethodRegisterTOTPSerializer(MFAMethodRegisterSerializerBase): + method_type = MFAMethodType.TOTP + code = serializers.CharField() + + def validate(self, attrs): + if not self.instance.verify_code(attrs['code']): + raise serializers.ValidationError('Invalid code') + return attrs + + +class MFAMethodRegisterFIDO2Serializer(MFAMethodRegisterSerializerBase): + method_type = MFAMethodType.FIDO2 + + def update(self, 
instance, validated_data): + try: + instance = MFAMethod.objects.create_fido2_complete(instance=instance, response=self.initial_data, save=False) + except ValueError as ex: + if ex.args and len(ex.args) == 1 and isinstance(ex.args[0], str): + raise serializers.ValidationError(ex.args[0], 'fido2') from ex + else: + raise ex + return super().update(instance, validated_data) + + +class AuthIdentitySerializer(serializers.ModelSerializer): + class Meta: + model = AuthIdentity + fields = ['provider', 'identifier'] + + def create(self, validated_data): + return super().create(validated_data | {'user': self.context['user']}) + \ No newline at end of file diff --git a/api/src/reportcreator_api/users/signals.py b/api/src/reportcreator_api/users/signals.py new file mode 100644 index 0000000..8f6dc2d --- /dev/null +++ b/api/src/reportcreator_api/users/signals.py @@ -0,0 +1,40 @@ +from django.db.models import signals +from django.dispatch import receiver +from django.conf import settings + +from reportcreator_api.users.models import PentestUser +from reportcreator_api.utils import license + + +@receiver(signals.pre_save, sender=PentestUser) +def user_count_license_check(sender, instance, *args, **kwargs): + if not instance.is_active: + return + + # User created + created = instance.id is None or instance._state.adding + if created: + licensable_users = PentestUser.objects.all() + if not license.is_professional(): + licensable_users = licensable_users.filter(is_superuser=True) + current_user_count = licensable_users.get_licensed_user_count() + + max_users = license.check_license().get('users', 1) + if current_user_count + 1 > max_users: + raise license.LicenseLimitExceededError( + f'License limit exceeded. Your license allows max. {max_users} users. ' + 'Please deactivate some users or extend your license.') + + # User updated + if (created or 'is_superuser' in instance.changed_fields) and not instance.is_superuser and not license.is_professional(): + raise license.LicenseError('Can only create superusers with a Community license. A Professional license is required for user roles.') + if (created or 'is_system_user' in instance.changed_fields) and instance.is_system_user and not license.is_professional(): + raise license.LicenseError('System users are not supported with a Community license. A Professional license is required.') + if not created and \ + ((instance.get_field_diff('is_superuser') == (False, True) and not license.is_professional()) or \ + (instance.get_field_diff('is_active') == (False, True))): + current_superuser_count = PentestUser.objects.filter(is_superuser=True).get_licensed_user_count() + max_users = license.check_license().get('users', 1) + if current_superuser_count + 1 > max_users: + raise license.LicenseError(f'License limit exceeded. Your license allows max. {max_users} users. 
Please deactivate some users or extend your license.') + diff --git a/api/src/reportcreator_api/users/views.py b/api/src/reportcreator_api/users/views.py new file mode 100644 index 0000000..08a2dfc --- /dev/null +++ b/api/src/reportcreator_api/users/views.py @@ -0,0 +1,362 @@ +import functools +import json +from datetime import datetime, timedelta +from authlib.integrations.django_client import OAuth, OAuthError +from rest_framework.response import Response +from rest_framework import viewsets, status, filters, mixins, serializers, exceptions +from rest_framework.decorators import action +from rest_framework.settings import api_settings +from django_filters.rest_framework import DjangoFilterBackend +from django.conf import settings +from django.forms import model_to_dict +from django.core.serializers.json import DjangoJSONEncoder +from django.contrib.auth import login, logout +from django.shortcuts import get_object_or_404 +from django.utils import timezone + +from reportcreator_api.utils import license +from reportcreator_api.users.models import PentestUser, MFAMethod, AuthIdentity +from reportcreator_api.users.permissions import UserViewSetPermissions, MFAMethodViewSetPermissons, MFALoginInProgressAuthentication, \ + AuthIdentityViewSetPermissions +from reportcreator_api.users.serializers import CreateUserSerializer, PentestUserDetailSerializer, PentestUserSerializer, \ + ResetPasswordSerializer, MFAMethodSerializer, LoginSerializer, LoginMFACodeSerializer, MFAMethodRegisterBackupCodesSerializer, \ + MFAMethodRegisterTOTPSerializer, MFAMethodRegisterFIDO2Serializer, AuthIdentitySerializer + + +oauth = OAuth() +for name, config in settings.AUTHLIB_OAUTH_CLIENTS.items(): + oauth.register(name, **config) + + +class APIBadRequestError(exceptions.APIException): + status_code = status.HTTP_400_BAD_REQUEST + default_detail = 'Invalid input.' 
+ default_code = 'invalid' + + +class PentestUserViewSet(viewsets.ModelViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [UserViewSetPermissions] + filter_backends = [filters.SearchFilter, DjangoFilterBackend] + search_fields = ['username', 'email', 'first_name', 'last_name'] + filterset_fields = ['username', 'email'] + + def get_queryset(self): + return PentestUser.objects \ + .only_permitted(self.request.user) \ + .annotate_mfa_enabled() \ + .prefetch_related('auth_identities') + + def get_object(self): + if self.kwargs.get('pk') == 'self': + return self.request.user + return super().get_object() + + def get_serializer_class(self): + if self.action in ['change_password', 'reset_password']: + return ResetPasswordSerializer + elif self.action == 'create': + return CreateUserSerializer + elif self.request.user.is_admin or self.request.user.is_user_manager or self.action in ['self', 'enable_admin_permissions', 'disable_admin_permissions']: + return PentestUserDetailSerializer + else: + return PentestUserSerializer + + @action(detail=False, methods=['get', 'put', 'patch']) + def self(self, request, *args, **kwargs): + self.kwargs['pk'] = 'self' + if request.method == 'PUT': + return self.update(request, *args, **kwargs) + elif request.method == 'PATCH': + return self.partial_update(request, *args, **kwargs) + else: + return self.retrieve(request, *args, **kwargs) + + @action(detail=False, url_path='self/change-password', methods=['post']) + def change_password(self, request, *args, **kwargs): + self.kwargs['pk'] = 'self' + return self.update(request, *args, **kwargs) + + @action(detail=False, url_path='self/admin/enable', methods=['post']) + def enable_admin_permissions(self, request, *args, **kwargs): + request.session['admin_permissions_enabled'] = True + request.session.cycle_key() + request.user.admin_permissions_enabled = True + self.kwargs['pk'] = 'self' + return self.retrieve(request=request, *args, **kwargs) + + @action(detail=False, url_path='self/admin/disable', methods=['post']) + def disable_admin_permissions(self, request, *args, **kwargs): + request.session.pop('admin_permissions_enabled', False) + request.session.cycle_key() + request.user.admin_permissions_enabled = False + self.kwargs['pk'] = 'self' + return self.retrieve(request=request, *args, **kwargs) + + @action(detail=True, url_path='reset-password', methods=['post']) + def reset_password(self, request, *args, **kwargs): + return self.update(request, *args, **kwargs) + + +class MFAMethodViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet): + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [MFAMethodViewSetPermissons] + pagination_class = None + + @functools.cache + def get_user(self): + user_pk = self.kwargs['pentestuser_pk'] + if user_pk == 'self': + return self.request.user + + qs = PentestUser.objects.all() + return get_object_or_404(qs, pk=user_pk) + + def get_queryset(self): + return MFAMethod.objects \ + .only_permitted(self.request.user) \ + .filter(user=self.get_user()) \ + .default_order() + + def get_serializer_class(self): + if self.action in ['register_backup_begin', 'register_totp_begin', 'register_fido2_begin']: + return serializers.Serializer + elif self.action == 'register_backup_complete': + return MFAMethodRegisterBackupCodesSerializer + elif self.action == 'register_totp_complete': + return MFAMethodRegisterTOTPSerializer + elif self.action == 'register_fido2_complete': + return 
MFAMethodRegisterFIDO2Serializer + return MFAMethodSerializer + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'user': self.get_user() + } + + @action(detail=False, url_path='register/backup/begin', methods=['post']) + def register_backup_begin(self, request, *args, **kwargs): + # if self.get_user().mfa_methods.filter(method_type=MFAMethodType.BACKUP).exists(): + # raise APIBadRequestError('Backup codes already exist') + + instance = MFAMethod.objects.create_backup(save=False, user=self.get_user(), name='Backup Codes') + return self.perform_register_begin(request, instance) + + @action(detail=False, url_path='register/totp/begin', methods=['post']) + def register_totp_begin(self, request, *args, **kwargs): + instance = MFAMethod.objects.create_totp(save=False, user=self.get_user(), name='TOTP') + return self.perform_register_begin(request, instance, {'qrcode': instance.get_totp_qrcode()}) + + @action(detail=False, url_path='register/fido2/begin', methods=['post']) + def register_fido2_begin(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + instance = MFAMethod.objects.create_fido2_begin(user=self.get_user(), name='Security Key') + return self.perform_register_begin(request, instance, {'state': None}) + + def perform_register_begin(self, request, instance, additional_response_data={}): + request.session['mfa_register'] = json.dumps(model_to_dict(instance), cls=DjangoJSONEncoder) + response_data = instance.data | additional_response_data + return Response(response_data, status=status.HTTP_200_OK) + + @action(detail=False, url_path='register/backup/complete', methods=['post']) + def register_backup_complete(self, *args, **kwargs): + return self.register_complete(*args, **kwargs) + + @action(detail=False, url_path='register/totp/complete', methods=['post']) + def register_totp_complete(self, *args, **kwargs): + return self.register_complete(*args, **kwargs) + + @action(detail=False, url_path='register/fido2/complete', methods=['post']) + def register_fido2_complete(self, *args, **kwargs): + return self.register_complete(*args, **kwargs) + + def register_complete(self, request, *args, **kwargs): + if not request.session.get('mfa_register'): + raise APIBadRequestError('No MFA registration in progress') + mfa_register_state = json.loads(request.session['mfa_register']) + mfa_register_state['user'] = self.get_user() + instance = MFAMethod(**mfa_register_state) + + serializer = self.get_serializer(instance=instance, data=request.data) + serializer.is_valid(raise_exception=True) + instance = serializer.save() + + del request.session['mfa_register'] + return Response(MFAMethodSerializer(instance=instance).data, status=status.HTTP_201_CREATED) + + +class AuthIdentityViewSet(viewsets.ModelViewSet): + serializer_class = AuthIdentitySerializer + permission_classes = api_settings.DEFAULT_PERMISSION_CLASSES + [AuthIdentityViewSetPermissions, license.ProfessionalLicenseRequired] + pagination_class = None + + @functools.cache + def get_user(self): + qs = PentestUser.objects.all() + return get_object_or_404(qs, pk=self.kwargs['pentestuser_pk']) + + def get_queryset(self): + return self.get_user().auth_identities.all() + + def get_serializer_context(self): + return super().get_serializer_context() | { + 'user': self.get_user() + } + + +class AuthViewSet(viewsets.ViewSet): + authentication_classes = [] + permission_classes = [] + + def get_serializer_class(self): + if self.action == 'login': + 
return LoginSerializer + elif self.action == 'login_code': + return LoginMFACodeSerializer + else: + return serializers.Serializer + + def get_serializer(self, *args, **kwargs): + return self.get_serializer_class()(context={'request': self.request}, *args, **kwargs) + + @action(detail=False, methods=['post'], authentication_classes=[]) + def login(self, request, *args, **kwargs): + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + user = serializer.validated_data + + mfa_methods = list(user.mfa_methods.all().default_order()) + if not mfa_methods: + # MFA disabled + return self.perform_login(request, user) + else: + request.session['login_state'] = request.session.get('login_state', {}) | { + 'status': 'mfa-required', + 'user_id': str(user.id), + 'start': timezone.now().isoformat(), + } + return Response({ + 'status': 'mfa-required', + 'mfa': MFAMethodSerializer(mfa_methods, many=True).data, + }, status=200) + + @action(detail=False, methods=['post'], authentication_classes=api_settings.DEFAULT_AUTHENTICATION_CLASSES) + def logout(self, request, *args, **kwargs): + logout(request=request) + return Response(status=status.HTTP_204_NO_CONTENT) + + @action(detail=False, url_path='login/code', methods=['post'], authentication_classes=[MFALoginInProgressAuthentication]) + def login_code(self, request, *args, **kwargs): + self._verify_mfa_preconditions(request) + + serializer = self.get_serializer(data=request.data) + serializer.is_valid(raise_exception=True) + return self.perform_login(request, request.user) + + @action(detail=False, url_path='login/fido2/begin', methods=['post'], authentication_classes=[MFALoginInProgressAuthentication]) + def login_fido2_begin(self, request, *args, **kwargs): + self._verify_mfa_preconditions(request) + + credentials = MFAMethod.objects.get_fido2_user_credentials(request.user) + if not credentials: + raise APIBadRequestError('No FIDO2 devices registered') + options, state = MFAMethod.get_fido2_server().authenticate_begin(credentials=credentials) + request.session['login_state'] |= {'fido2_state': state} + return Response(dict(options), status=status.HTTP_200_OK) + + @action(detail=False, url_path='login/fido2/complete', methods=['post'], authentication_classes=[MFALoginInProgressAuthentication]) + def login_fido2_complete(self, request, *args, **kwargs): + self._verify_mfa_preconditions(request) + state = request.session.get('login_state', {}).pop('fido2_state', None) + try: + MFAMethod.get_fido2_server().authenticate_complete( + state=state, + credentials=MFAMethod.objects.get_fido2_user_credentials(request.user), + response=request.data + ) + except ValueError as ex: + if ex.args and len(ex.args) == 1 and isinstance(ex.args[0], str): + raise serializers.ValidationError(ex.args[0], 'fido2') from ex + else: + raise ex + return self.perform_login(request, request.user) + + def _verify_mfa_preconditions(self, request): + login_state = request.session.get('login_state', {}) + if login_state.get('status') != 'mfa-required': + raise APIBadRequestError('MFA login not allowed') + elif datetime.fromisoformat(login_state.get('start')) + settings.MFA_LOGIN_TIMEOUT < timezone.now(): + raise APIBadRequestError('Login timeout. Please restart login.') + + def perform_login(self, request, user, can_reauth=True): + if not license.is_professional() and not user.is_superuser: + raise license.LicenseError('Only superusers are allowed to login. 
A Professional license is required to enable user roles.') + elif not license.is_professional() and user.is_system_user: + raise license.LicenseError('System users are disabled. A Professional license is required to use system users.') + + request.session.pop('login_state', None) + first_login = not user.last_login + is_reauth = bool(request.session.get('authentication_info', {}).get('login_time')) and str(user.id) == request.session.get('_auth_user_id') + if is_reauth and can_reauth: + request.session['authentication_info'] |= { + 'reauth_time': timezone.now().isoformat(), + } + request.session.cycle_key() + else: + request.session['authentication_info'] = request.session.get('authentication_info', {}) | { + 'login_time': timezone.now().isoformat(), + } + login(request=self.request, user=user) + return Response({ + 'status': 'success', + 'first_login': first_login, + }, status=status.HTTP_200_OK) + + @action(detail=False, url_path='login/oidc/(?P<oidc_provider>[a-zA-Z0-9]+)/begin', methods=['get'], permission_classes=[license.ProfessionalLicenseRequired]) + def login_oidc_begin(self, request, oidc_provider, *args, **kwargs): + if oidc_provider not in settings.AUTHLIB_OAUTH_CLIENTS: + raise APIBadRequestError(f'OIDC provider "{oidc_provider}" not supported') + + request.session['login_state'] = { + 'status': 'oidc-callback-required', + 'start': timezone.now().isoformat(), + } + redirect_uri = request.build_absolute_uri(f'/login/oidc/{oidc_provider}/callback') + redirect_kwargs = {} + if request.GET.get('reauth'): + redirect_kwargs |= { + 'prompt': 'login', + 'max_age': 0 + } + if login_hint := request.session.get('authentication_info', {}).get(f'oidc_{oidc_provider}_login_hint'): + redirect_kwargs |= {'login_hint': login_hint} + + return oauth.create_client(oidc_provider).authorize_redirect(request, redirect_uri, **redirect_kwargs) + + @action(detail=False, url_path='login/oidc/(?P<oidc_provider>[a-zA-Z0-9]+)/complete', methods=['get'], permission_classes=[license.ProfessionalLicenseRequired]) + def login_oidc_complete(self, request, oidc_provider, *args, **kwargs): + if not request.session.get('login_state', {}).get('status') == 'oidc-callback-required': + raise APIBadRequestError('No OIDC login in progress for session') + + try: + token = oauth.create_client(oidc_provider).authorize_access_token(request) + except OAuthError as ex: + raise exceptions.AuthenticationFailed(detail=ex.description, code=ex.error) + + identity = AuthIdentity.objects \ + .select_related('user') \ + .filter(provider=oidc_provider) \ + .filter(identifier=token['userinfo'].get('email')) \ + .first() + if not identity: + raise exceptions.AuthenticationFailed() + + can_reauth = False + if (auth_time := token['userinfo'].get('auth_time')): + can_reauth = (timezone.now() - timezone.make_aware(datetime.fromtimestamp(auth_time))) < timedelta(minutes=1) + res = self.perform_login(request, identity.user, can_reauth=can_reauth) + request.session['authentication_info'] |= { + f'oidc_{oidc_provider}_login_hint': token['userinfo'].get('login_hint'), + } + return res + + diff --git a/api/src/reportcreator_api/utils/admin.py b/api/src/reportcreator_api/utils/admin.py new file mode 100644 index 0000000..8545818 --- /dev/null +++ b/api/src/reportcreator_api/utils/admin.py @@ -0,0 +1,33 @@ + +from urllib.parse import urlencode, urlunsplit + +from django.contrib import admin +from django.urls import reverse +from django.utils.html import format_html + + +class BaseAdmin(admin.ModelAdmin): + date_hierarchy = 'created' + ordering = ['-created'] + + def 
get_readonly_fields(self, request, obj): + readonly_fields = super().get_readonly_fields(request, obj) + return readonly_fields + tuple(set([f for f in dir(self) if f.startswith('link_')]).difference(readonly_fields)) + + +def admin_url(label, app_name, model_name, type_name, params=None, *args, **kwargs): + admin_url_query = '' + if params: + admin_url_query = urlencode(params) + + admin_path = reverse('admin:%s_%s_%s' % (app_name, model_name, type_name), args=args, kwargs=kwargs) + admin_url = urlunsplit(['', '', admin_path, admin_url_query, '']) + return format_html('<a href="{}">{}</a>', admin_url, label) + + +def admin_change_url(label, app_name, model_name, object_id, params=None): + return admin_url(label, app_name, model_name, 'change', params, object_id) + + +def admin_changelist_url(label, app_name, model_name, params=None): + return admin_url(label, app_name, model_name, 'changelist', params) \ No newline at end of file diff --git a/api/src/reportcreator_api/utils/api.py b/api/src/reportcreator_api/utils/api.py new file mode 100644 index 0000000..c090123 --- /dev/null +++ b/api/src/reportcreator_api/utils/api.py @@ -0,0 +1,79 @@ +from asgiref.sync import sync_to_async +from django.http import FileResponse, Http404 +from django.core.exceptions import PermissionDenied +from adrf.views import APIView as AsyncAPIView +from rest_framework import exceptions, status, views, generics, viewsets +from rest_framework.response import Response + +from reportcreator_api.utils import license + + +class GenericAPIViewAsync(generics.GenericAPIView, AsyncAPIView): + _action = None + + @property + def action(self): + return self._action + + @action.setter + def action(self, value): + self._action = value + + async def aget_valid_serializer(self, *args, **kwargs): + serializer = self.get_serializer(*args, **kwargs) + await sync_to_async(serializer.is_valid)(raise_exception=True) + return serializer + + async def aget_object(self): + return await sync_to_async(super().get_object)() + + +class FileResponseAsync(FileResponse): + async def to_async_iterator(self, sync_iter): + for chunk in await sync_to_async(list)(sync_iter): + yield chunk + + def _set_streaming_content(self, value): + if not hasattr(value, "read"): + self.file_to_stream = None + return super()._set_streaming_content(self.to_async_iterator(value)) + + self.file_to_stream = filelike = value + if hasattr(filelike, "close"): + self._resource_closers.append(filelike.close) + value = iter(lambda: filelike.read(self.block_size), b"") + self.set_headers(filelike) + super()._set_streaming_content(self.to_async_iterator((value))) + + +def exception_handler(exc, context): + """ + Returns the response that should be used for any given exception. + By default we handle the REST framework `APIException`, and also + Django's built-in `Http404` and `PermissionDenied` exceptions. + Any unhandled exceptions may return `None`, which will cause a 500 error + to be raised. 
+ """ + if isinstance(exc, Http404): + exc = exceptions.NotFound(*(exc.args)) + elif isinstance(exc, PermissionDenied): + exc = exceptions.PermissionDenied(*(exc.args)) + elif isinstance(exc, license.LicenseError): + exc = exceptions.PermissionDenied(detail=exc.detail, code='license') + + if isinstance(exc, exceptions.APIException): + headers = {} + if getattr(exc, 'auth_header', None): + headers['WWW-Authenticate'] = exc.auth_header + if getattr(exc, 'wait', None): + headers['Retry-After'] = '%d' % exc.wait + + if isinstance(exc.detail, (list, dict)): + data = exc.detail + else: + data = {'detail': exc.detail, 'code': exc.detail.code} + + views.set_rollback() + return Response(data, status=exc.status_code, headers=headers) + + return None \ No newline at end of file diff --git a/api/src/reportcreator_api/utils/decorators.py b/api/src/reportcreator_api/utils/decorators.py new file mode 100644 index 0000000..6b7a566 --- /dev/null +++ b/api/src/reportcreator_api/utils/decorators.py @@ -0,0 +1,16 @@ +from django.core.cache import cache as django_cache + + +def cache(key, **cache_kwargs): + def inner(func): + def wrapped(*args, **kwargs): + val = django_cache.get(key) + if val is not None: + return val + else: + val = func(*args, **kwargs) + django_cache.set(key=key, value=val, **cache_kwargs) + return val + return wrapped + return inner + diff --git a/api/src/reportcreator_api/utils/error_messages.py b/api/src/reportcreator_api/utils/error_messages.py new file mode 100644 index 0000000..c247a86 --- /dev/null +++ b/api/src/reportcreator_api/utils/error_messages.py @@ -0,0 +1,69 @@ +import dataclasses +import enum +from typing import Optional, Union + + +class MessageLevel(enum.Enum): + ERROR = 'error' + WARNING = 'warning' + INFO = 'info' + DEBUG = 'debug' + + +class MessageLocationType(enum.Enum): + FINDING = 'finding' + PROJECT = 'project' + SECTION = 'section' + DESIGN = 'design' + OTHER = 'other' + + +def format_path(path: Union[None, str, tuple[str], list[str]]): + path_str = path + if isinstance(path, (tuple, list)): + path_str = '' + for p in path: + if path_str and p and p[0] != '[': + path_str += '.' + path_str += p + return path_str + + +@dataclasses.dataclass +class MessageLocationInfo: + type: MessageLocationType + id: Optional[str] = None + name: Optional[str] = None + path: Optional[str] = None + + def sub_path(self, sub_path: str): + path = self.path or '' + if sub_path.startswith('[') or not path: + path += sub_path + else: + path += '.' 
+ sub_path + return MessageLocationInfo(**(dataclasses.asdict(self) | {'path': path})) + + def for_path(self, path: Union[None, str, tuple[str], list[str]]): + return MessageLocationInfo(**(dataclasses.asdict(self) | {'path': format_path(path)})) + + +@dataclasses.dataclass +class ErrorMessage: + level: MessageLevel + location: MessageLocationInfo + message: str + details: Optional[str] = None + + def to_dict(self): + return dataclasses.asdict(self) | { + 'level': self.level.value, + 'location': dataclasses.asdict(self.location) | { + 'type': self.location.type.value, + }, + } + + +def format_messages(lst: list[ErrorMessage]): + return {l.value: [e.to_dict() for e in lst if e.level == l] for l in list(MessageLevel)} + diff --git a/api/src/reportcreator_api/utils/files.py b/api/src/reportcreator_api/utils/files.py new file mode 100644 index 0000000..c9d5cfc --- /dev/null +++ b/api/src/reportcreator_api/utils/files.py @@ -0,0 +1,92 @@ +import string +import io +import logging +from django.conf import settings +from pathlib import Path +from PIL import Image, ImageOps, UnidentifiedImageError +from django.core.files.base import ContentFile, File +from reportcreator_api.utils.logging import log_timing + + +log = logging.getLogger(__name__) + + +def normalize_filename(name): + """ + Normalize filename: strip special characters that might conflict with markdown syntax + """ + out = '' + for c in name: + if c in string.ascii_letters + string.digits + '-.': + out += c + else: + out += '-' + if Path(name).parent.parts: + out = Path(name).parts[-1] + if all(map(lambda c: c == '.', out)): + out = 'file' + return out + + +def image_contains_transparent_pixels(img): + if 'A' not in img.getbands(): + return False + + a_band_index = img.getbands().index('A') + return any(map(lambda d: d[a_band_index] != 255, img.getdata())) + + +@log_timing +def compress_image(file, name=None): + """ + Compress image files and convert them to JPEG or PNG. + If the file is not an image or is an SVG file, return it as-is without compressing or converting it.
+ """ + if not settings.COMPRESS_IMAGES: + return file, name + + try: + with Image.open(file) as img: + img_format = img.format + if img_format == 'SVG': + raise UnidentifiedImageError('Do not compress SVG') + + # resize image to a max size + img.thumbnail(size=(2000, 2000), resample=Image.Resampling.LANCZOS) + + # Ensure the image is correctly rotated (not rotated via EXIF info) + img = ImageOps.exif_transpose(img) + + if img.mode not in ['RGB', 'RGBA']: + img = img.convert('RGBA') + + # Check if image uses transparency + out = io.BytesIO() + if img_format in ['PNG', 'GIF'] or image_contains_transparent_pixels(img): + # Convert to PNG and reduce quality + img.save(out, format='PNG', optimize=True) + file_extension = '.png' + else: + # Convert to JPEG and reduce quality + img = img.convert('RGB') + img.save(out, format='JPEG', quality=75, optimize=True) + file_extension = '.jpg' + + # Change extension in filename + name = name or getattr(file, 'name', None) + if name: + name_path = Path(name) + if name_path.suffix: + name = name[:-len(name_path.suffix)] + file_extension + + if isinstance(file, File): + return ContentFile(content=out.getvalue(), name=name or file.name), name + else: + out.seek(0) + return out, name + except Exception as ex: + if not isinstance(ex, UnidentifiedImageError): + log.exception('Image compression error') + file.seek(0) + return file, name + diff --git a/api/src/reportcreator_api/utils/license.py b/api/src/reportcreator_api/utils/license.py new file mode 100644 index 0000000..a0bb7e9 --- /dev/null +++ b/api/src/reportcreator_api/utils/license.py @@ -0,0 +1,123 @@ + +import base64 +import json +import logging +from Cryptodome.Hash import SHA512 +from Cryptodome.PublicKey import ECC +from Cryptodome.Signature import eddsa +from django.conf import settings +from django.db import models +from django.utils import dateparse, timezone +from rest_framework import permissions + +from reportcreator_api.utils.decorators import cache + + +class LicenseError(Exception): + def __init__(self, detail: str) -> None: + super().__init__(detail) + self.detail = detail + + +class LicenseLimitExceededError(LicenseError): + pass + + +class LicenseType(models.TextChoices): + COMMUNITY = 'community', 'Community' + PROFESSIONAL = 'professional', 'Professional' + + +class ProfessionalLicenseRequired(permissions.BasePermission): + def has_permission(self, request, view): + if not is_professional(): + raise LicenseError('Professional license required') + return True + + +def verify_signature(data: str, signature: dict): + public_key = next(filter(lambda k: k['id'] == signature['key_id'], settings.LICENSE_VALIDATION_KEYS), None) + if not public_key: + return False + if public_key['algorithm'] != signature['algorithm'] or signature['algorithm'] != 'ed25519': + return False + + try: + verifier = eddsa.new(key=ECC.import_key(base64.b64decode(public_key['key'])), mode='rfc8032') + verifier.verify(msg_or_hash=SHA512.new(data.encode()), signature=base64.b64decode(signature['signature'])) + return True + except Exception: + return False + + +def parse_date(s): + out = dateparse.parse_date(s) + if out is None: + raise ValueError() + return out + + +def decode_license(license): + try: + license_wrapper = json.loads(base64.b64decode(license)) + for signature in license_wrapper['signatures']: + if verify_signature(license_wrapper['data'], signature): + license_data = json.loads(license_wrapper['data']) + license_data['valid_from'] = parse_date(license_data['valid_from']) + license_data['valid_until'] = 
parse_date(license_data['valid_until']) + if not isinstance(license_data['users'], int) or license_data['users'] <= 0: + raise LicenseError('Invalid user count in license') + return license_data + else: + raise LicenseError('No valid signature found for license') + except LicenseError: + raise + except Exception as ex: + raise LicenseError('Failed to load license: Invalid format.') from ex + + +def decode_and_validate_license(license): + from reportcreator_api.users.models import PentestUser + + try: + if not license: + raise LicenseError(None) + + # Validate license + license_data = decode_license(license) + period_info = f"The license is valid from {license_data['valid_from'].isoformat()} until {license_data['valid_until'].isoformat()}" + if license_data['valid_from'] > timezone.now().date(): + raise LicenseError('License not yet valid: ' + period_info) + elif license_data['valid_until'] < timezone.now().date(): + raise LicenseError('License expired: ' + period_info) + + # Validate license limits not exceeded + current_user_count = PentestUser.objects.get_licensed_user_count() + if current_user_count > license_data['users']: + raise LicenseError( + f"License limit exceeded: You licensed max. {license_data['users']} users, but have currently {current_user_count} active users. " + "Falling back to the free license. Please deactivate some users or extend your license.") + + # All license checks are valid + return { + 'type': LicenseType.PROFESSIONAL, + 'error': None, + } | license_data + except LicenseError as ex: + if license: + logging.exception('License validation failed') + return { + 'type': LicenseType.COMMUNITY, + 'users': settings.LICENSE_COMMUNITY_MAX_USERS, + 'error': ex.detail, + } + + +@cache('license.license_info', timeout=10 * 60) +def check_license(): + return decode_and_validate_license(settings.LICENSE) + + +def is_professional(): + return check_license().get('type', LicenseType.COMMUNITY) == LicenseType.PROFESSIONAL + diff --git a/api/src/reportcreator_api/utils/logging.py b/api/src/reportcreator_api/utils/logging.py new file mode 100644 index 0000000..094e954 --- /dev/null +++ b/api/src/reportcreator_api/utils/logging.py @@ -0,0 +1,36 @@ +import logging +from django.utils import timezone, deprecation +from django.utils.functional import wraps + + +log = logging.getLogger() + + +def log_timing(fn): + @wraps(fn) + def inner(*args, **kwargs): + start_time = timezone.now() + out = fn(*args, **kwargs) + timing = timezone.now() - start_time + log.info(f'Function {fn.__name__} took {timing}') + return out + return inner + + +class RequestLoggingMiddleware(deprecation.MiddlewareMixin): + def should_log(self, request, response): + # Do not log healthchecks + return request.resolver_match and request.resolver_match.url_name not in ['utils-healthcheck'] + + def process_response(self, request, response): + if self.should_log(request, response): + user = '' + if getattr(request, 'user', None) and not request.user.is_anonymous: + user = request.user.username + log.info('%s %s %d (user=%s)', request.method, request.get_full_path(), response.status_code, user) + return response + + def process_exception(self, request, exception): + log.exception(str(exception)) + return None + diff --git a/api/src/reportcreator_api/utils/middleware.py b/api/src/reportcreator_api/utils/middleware.py new file mode 100644 index 0000000..3227c19 --- /dev/null +++ b/api/src/reportcreator_api/utils/middleware.py @@ -0,0 +1,57 @@ +from datetime import timedelta +from urllib.parse import urlparse +from 
django.conf import settings +from django.utils import timezone, cache, deprecation +from django.middleware.csrf import CsrfViewMiddleware +from whitenoise.middleware import WhiteNoiseMiddleware + + +class CustomCsrfMiddleware(CsrfViewMiddleware): + def process_view(self, request, *args, **kwargs): + # Skip CSRF checks for requests that cannot be sent cross-origin without a preflight request + if request.content_type not in ['application/x-www-form-urlencoded', 'multipart/form-data', 'text/plain', ''] or \ + request.method != 'POST': + return None + + return super().process_view(request, *args, **kwargs) + + def _origin_verified(self, request): + if super()._origin_verified(request): + return True + + try: + parsed_origin = urlparse(request.META["HTTP_ORIGIN"]) + except ValueError: + return False + + # Allow skipping origin checks + return parsed_origin.scheme + '://*' in settings.CSRF_TRUSTED_ORIGINS + + + +class ExtendSessionMiddleware(deprecation.MiddlewareMixin): + def process_request(self, request): + if request.session and request.session.get_expiry_date() - timezone.now() > timedelta(request.session.get_expiry_age() / 2): + # Extend session lifetime + # When a session value is changed the session is updated in the DB and its lifetime is reset to SESSION_COOKIE_AGE + # This does not affect the "Expire" attribute on the session cookie. + # If SESSION_EXPIRE_AT_BROWSER_CLOSE=True, the Expire attribute is still unset + request.session['tmp_extend_session_time'] = request.session.get('tmp_extend_session_time', 0) + 1 + + +class AdminSessionMiddleware(deprecation.MiddlewareMixin): + def process_request(self, request): + if request.user and request.session and request.session.get('admin_permissions_enabled'): + setattr(request.user, 'admin_permissions_enabled', True) + + +class CacheControlMiddleware(deprecation.MiddlewareMixin): + def process_response(self, request, response): + cache.add_never_cache_headers(response) + return response + + +class PermissionsPolicyMiddleware(deprecation.MiddlewareMixin): + def process_response(self, request, response): + response.headers['Permissions-Policy'] = ', '.join(map(lambda t: f"{t[0]}={t[1] or '()'}", settings.PERMISSIONS_POLICY.items())) + return response diff --git a/api/src/reportcreator_api/utils/models.py b/api/src/reportcreator_api/utils/models.py new file mode 100644 index 0000000..da89e7a --- /dev/null +++ b/api/src/reportcreator_api/utils/models.py @@ -0,0 +1,89 @@ +import itertools +import uuid +import functools +from django.db import models +from django.utils import timezone +from django.contrib.contenttypes.fields import GenericRelation + + +class ModelDiffMixin(object): + """ + A model mixin that tracks model fields' values and provide some useful api + to know what fields have been changed. + """ + + def __init__(self, *args, **kwargs): + super(ModelDiffMixin, self).__init__(*args, **kwargs) + self.__initial = self._dict + + @property + def diff(self): + d1 = self.__initial + d2 = self._dict + diffs = [(k, (v, d2[k])) for k, v in d1.items() if v != d2[k]] + return dict(diffs) + + @property + def has_changed(self): + return bool(self.diff) + + @property + def changed_fields(self): + return self.diff.keys() + + def get_field_diff(self, field_name): + """ + Returns a diff for field if it's changed and None otherwise. + """ + return self.diff.get(field_name, None) + + def clear_changed_fields(self): + self.__initial = self._dict + + def save(self, *args, **kwargs): + """ + Saves model and set initial state. 
+ """ + super(ModelDiffMixin, self).save(*args, **kwargs) + self.clear_changed_fields() + + @property + def _dict(self): + diff_fields = {field.attname for field in self._meta.fields if not isinstance(field, GenericRelation)} - self.get_deferred_fields() + + out = {} + for f in itertools.chain(self._meta.concrete_fields, self._meta.private_fields, self._meta.many_to_many): + if getattr(f, 'attname', None) in diff_fields: + v = f.value_from_object(self) + if isinstance(v, (dict, list)): + v = v.copy() + out[f.attname] = v + return out + + +def now(): + return timezone.now() + + +class BaseModel(ModelDiffMixin, models.Model): + id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) + created = models.DateTimeField(default=now, editable=False) + updated = models.DateTimeField(auto_now=True, editable=False) + + class Meta: + abstract = True + ordering = ['-created'] + + +def disable_for_loaddata(signal_handler): + """ + Decorator that turns off signal handlers when loading fixture data. + """ + + @functools.wraps(signal_handler) + def wrapper(*args, **kwargs): + if kwargs.get('raw'): + return + signal_handler(*args, **kwargs) + return wrapper + diff --git a/api/src/reportcreator_api/utils/relations.py b/api/src/reportcreator_api/utils/relations.py new file mode 100644 index 0000000..d213568 --- /dev/null +++ b/api/src/reportcreator_api/utils/relations.py @@ -0,0 +1,94 @@ +from django.db.models import OneToOneRel, OneToOneField, ForeignObject +from django.db.models.fields.related_descriptors import ReverseOneToOneDescriptor +from django.db.models.fields.related import RelatedField +from django.contrib.contenttypes.fields import GenericRelation, GenericRel, GenericForeignKey + + +class GenericOneToOneForeignKey(GenericForeignKey): + pass + + +class GenericOneToOneRel(GenericRel): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.multiple = False + + +class GenericOneToOneRelation(GenericRelation): + one_to_many = False + many_to_one = False + one_to_one = True + rel_class = GenericOneToOneRel + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.multiple = False + + def get_internal_type(self) -> str: + return OneToOneField.__name__ + # return None + + def contribute_to_class(self, cls, name, **kwargs): + super().contribute_to_class(cls, name, **kwargs) + setattr(cls, name, ReverseGenericOneToOneDescriptor(self)) + + def get_accessor_name(self): + return '' + + +class ReverseGenericOneToOneDescriptor(ReverseOneToOneDescriptor): + def __get__(self, instance, cls=None): + if instance is None: + return self + + # The related instance is loaded from the database and then cached + # by the field on the model instance state. It can also be pre-cached + # by the forward accessor (ForwardManyToOneDescriptor). + try: + rel_obj = self.related.get_cached_value(instance) + except KeyError: + related_pk = instance.pk + if related_pk is None: + rel_obj = None + else: + filter_args = { + self.related.content_type_field_name: self.related.get_content_type(), + self.related.object_id_field_name: related_pk, + } + try: + rel_obj = self.get_queryset(instance=instance).get(**filter_args) + except self.related.related_model.DoesNotExist: + rel_obj = None + self.related.set_cached_value(instance, rel_obj) + + return rel_obj + + def __set__(self, instance, value): + if value is None: + # Update the cached related instance (if any) & clear the cache. 
+ # Following the example above, this would be the cached + # ``restaurant`` instance (if any). + rel_obj = self.related.get_cached_value(instance, default=None) + if rel_obj is not None: + # Remove the ``restaurant`` instance from the ``place`` + # instance cache. + self.related.delete_cached_value(instance) + # Set the ``place`` field on the ``restaurant`` + # instance to None. + setattr(rel_obj, self.related.name, None) + elif not isinstance(value, self.related.related_model): + # An object must be an instance of the related class. + raise ValueError( + 'Cannot assign "%r": "%s.%s" must be a "%s" instance.' + % ( + value, + instance._meta.object_name, + self.related.get_accessor_name(), + self.related.related_model._meta.object_name, + ) + ) + else: + # Set the related instance cache used by __get__ to avoid an SQL query + # when accessing the attribute we just set. + self.related.set_cached_value(instance, value) + diff --git a/api/src/reportcreator_api/utils/storages.py b/api/src/reportcreator_api/utils/storages.py new file mode 100644 index 0000000..267fe2c --- /dev/null +++ b/api/src/reportcreator_api/utils/storages.py @@ -0,0 +1,63 @@ +import os + +from django.core.files.storage import FileSystemStorage, InMemoryStorage +from storages.backends.s3boto3 import S3Boto3Storage + +from reportcreator_api.archive.crypto.storage import EncryptedStorageMixin + + +class FileSystemOverwriteStorage(FileSystemStorage): + """ + FileSystemStorage that overwrites the original file if it already exists + """ + def __init__(self, location=None, **kwargs): + super().__init__(location=location, **kwargs) + + # Create directory if it does not exist + if not os.path.exists(location): + os.makedirs(location) + + def _save(self, name, content): + self.delete(name) + return super()._save(name, content) + + +class UnencryptedFileSystemStorage(FileSystemStorage): + def __init__(self, location=None, base_url=None, file_permissions_mode=None, directory_permissions_mode=None, **kwargs): + super().__init__( + location=location, + base_url=base_url, + file_permissions_mode=file_permissions_mode, + directory_permissions_mode=directory_permissions_mode, + ) + + +class EncryptedFileSystemStorage(EncryptedStorageMixin, UnencryptedFileSystemStorage): + pass + + +class UnencryptedS3Storage(S3Boto3Storage): + def __init__(self, access_key=None, secret_key=None, security_token=None, bucket_name=None, endpoint_url=None, location=None, **kwargs) -> None: + super().__init__( + access_key=access_key, + secret_key=secret_key, + security_token=security_token, + bucket_name=bucket_name, + endpoint_url=endpoint_url, + location=str(location), + ) + + def get_default_settings(self): + return super().get_default_settings() | { + 'security_token': None, + } + + +class EncryptedS3Storage(EncryptedStorageMixin, UnencryptedS3Storage): + pass + + +class EncryptedInMemoryStorage(EncryptedStorageMixin, InMemoryStorage): + def __init__(self, **kwargs) -> None: + super().__init__() + diff --git a/api/src/reportcreator_api/utils/tasks.py b/api/src/reportcreator_api/utils/tasks.py new file mode 100644 index 0000000..113130e --- /dev/null +++ b/api/src/reportcreator_api/utils/tasks.py @@ -0,0 +1,6 @@ +from django.core.management import call_command + + +def clear_sessions(task_info): + call_command('clearsessions') + diff --git a/api/src/reportcreator_api/utils/throttling.py b/api/src/reportcreator_api/utils/throttling.py new file mode 100644 index 0000000..83bbd03 --- /dev/null +++ b/api/src/reportcreator_api/utils/throttling.py @@ -0,0 +1,19 
@@ +import re +from rest_framework import throttling + + +class ScopedUserRateThrottle(throttling.ScopedRateThrottle): + def parse_rate(self, rate): + """ + Given the request rate string, return a two tuple of: + <allowed number of requests>, <period of time in seconds> + """ + if rate is None: + return (None, None) + m = re.match(r'^(?P<rate>[0-9]+)/(?P<mult>[0-9]+)?(?P<period>s|m|h|d)$', rate) + return int(m.group('rate')), {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}[m.group('period')] * int(m.group('mult') or 1) + + def get_ident(self, request): + if request.user and not request.user.is_anonymous: + return str(request.user.id) + return super().get_ident(request) diff --git a/api/src/reportcreator_api/utils/utils.py b/api/src/reportcreator_api/utils/utils.py new file mode 100644 index 0000000..9fa7943 --- /dev/null +++ b/api/src/reportcreator_api/utils/utils.py @@ -0,0 +1,98 @@ +from datetime import date +from itertools import groupby +from typing import Union, Iterable, OrderedDict +import uuid + + +def remove_duplicates(lst: list) -> list: + return list(dict.fromkeys(lst)) + + +def find_all_indices(s: str, find: str): + idx = 0 + while True: + idx = s.find(find, idx) + if idx == -1: + break + else: + yield idx + idx += 1 + + +def get_key_or_attr(d: Union[dict, object], k: str, default=None): + return d.get(k, default) if isinstance(d, (dict, OrderedDict)) else getattr(d, k, default) + + +def copy_keys(d: Union[dict, object], keys: Iterable[str]) -> dict: + keys = set(keys) + out = {} + for k in keys: + if isinstance(d, (dict, OrderedDict)): + if k in d: + out[k] = d[k] + else: + if hasattr(d, k): + out[k] = getattr(d, k) + return out + + +def omit_keys(d: dict, keys: Iterable[str]) -> dict: + keys = set(keys) + return dict(filter(lambda t: t[0] not in keys, d.items())) + + +def omit_items(l: Iterable, items: Iterable) -> list: + l = list(l) + items = set(items) + for i in items: + while True: + try: + l.remove(i) + except ValueError: + break + return l + + +def is_uuid(val): + try: + uuid.UUID(val) + return True + except (ValueError, TypeError, AttributeError): + return False + + +def is_date_string(val): + try: + date.fromisoformat(val) + return True + except (ValueError, TypeError, AttributeError): + return False + + +def merge(*args): + """ + Recursively merge dicts + """ + out = None + for d in args: + if isinstance(d, (dict, OrderedDict)) and isinstance(out, (dict, OrderedDict)): + for k, v in d.items(): + if k not in out: + out[k] = v + else: + out[k] = merge(out.get(k), v) + elif isinstance(d, list) and isinstance(out, list): + l = [] + for i, dv in enumerate(d): + if len(out) > i: + l.append(merge(out[i], dv)) + else: + l.append(dv) + out = l + else: + out = d + return out + + +def groupby_to_dict(data, key): + return dict(map(lambda t: (t[0], list(t[1])), groupby(sorted(data, key=key), key=key))) \ No newline at end of file diff --git a/deploy/.env b/deploy/.env new file mode 100644 index 0000000..13df99d --- /dev/null +++ b/deploy/.env @@ -0,0 +1 @@ +SYSREPTOR_VERSION=0.76 diff --git a/deploy/.gitignore b/deploy/.gitignore new file mode 100644 index 0000000..0e222bd --- /dev/null +++ b/deploy/.gitignore @@ -0,0 +1 @@ +app.env diff --git a/deploy/app.env.example b/deploy/app.env.example new file mode 100644 index 0000000..bae6a37 --- /dev/null +++ b/deploy/app.env.example @@ -0,0 +1,26 @@ +# See https://docs.sysreptor.com/setup/configuration/ + +# Change this key and make sure it remains secret.
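The placeholder SECRET_KEY below, and the commented ENCRYPTION_KEYS entry that follows it, must be replaced with strong random secrets before deployment. One possible way to generate suitable values is sketched here using only the Python standard library; the printed labels are illustrative and are not part of the app.env configuration format.

```python
# Illustrative sketch, not part of the SysReptor sources: print candidate
# secrets for app.env. SECRET_KEY accepts any long random string; the
# ENCRYPTION_KEYS entry expects a 256 bit (32 byte) base64-encoded AES key.
import base64
import secrets

print('SECRET_KEY candidate: ', secrets.token_urlsafe(64))
print('AES-256 key candidate:', base64.b64encode(secrets.token_bytes(32)).decode())
```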
+# https://docs.sysreptor.com/setup/configuration/#django-secret-key +SECRET_KEY="TODO-change-me-Z6cuMithzO0fMn3ZqJ7nTg0YJznoHiJXoJCNngQM4Kqzzd3fiYKdVx9ZidvTzqsm" + +# Change keys and make sure encryption keys remain secret. +# https://docs.sysreptor.com/setup/configuration/#data-encryption-at-rest +# ENCRYPTION_KEYS='[{"id": "TODO-change-me-unique-key-id-5cdda4c0-a16c-4ae2-8a16-aa2ff258530d", "key": "256 bit (32 byte) base64 encoded AES key", "cipher": "AES-GCM", "revoked": false}]' +# DEFAULT_ENCRYPTION_KEY_ID="TODO-change-me-unique-key-id-5cdda4c0-a16c-4ae2-8a16-aa2ff258530d" + +# https://docs.sysreptor.com/setup/configuration/#fido2webauthn +# MFA_FIDO2_RP_ID="sysreptor.example.com" + +# Do not use debug mode in production environments +# https://docs.sysreptor.com/setup/configuration/#debug-mode +DEBUG=off + +# https://docs.sysreptor.com/setup/configuration/#license-key +# LICENSE="" + +# https://docs.sysreptor.com/setup/configuration/#spell-check +# SPELLCHECK_DICTIONARY_PER_USER=false + +# https://docs.sysreptor.com/setup/configuration/#private-designs +# ENABLE_PRIVATE_DESIGNS=false \ No newline at end of file diff --git a/deploy/docker-compose.override.yml b/deploy/docker-compose.override.yml new file mode 100644 index 0000000..efd3a75 --- /dev/null +++ b/deploy/docker-compose.override.yml @@ -0,0 +1,32 @@ +version: '3.9' +name: sysreptor + +services: + app: + environment: + SPELLCHECK_URL: http://languagetool:8010/ + depends_on: + languagetool: + condition: service_started + languagetool: + build: + context: ../languagetool + container_name: 'sysreptor-languagetool' + init: true + environment: + languagetool_dbHost: db + languagetool_dbName: reportcreator + languagetool_dbUsername: reportcreator + languagetool_dbPassword: reportcreator + expose: + - 8010 + healthcheck: + test: ["CMD", "curl", "-f", "-so", "/dev/null", "http://localhost:8010/v2/languages"] + interval: 30s + timeout: 30s + retries: 5 + start_period: 10s + restart: always + depends_on: + db: + condition: service_healthy \ No newline at end of file diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml new file mode 100644 index 0000000..6bd8686 --- /dev/null +++ b/deploy/docker-compose.yml @@ -0,0 +1,66 @@ +version: '3.9' +name: sysreptor + +services: + db: + image: 'postgres:14' + container_name: 'sysreptor-db' + environment: + POSTGRES_USER: reportcreator + POSTGRES_PASSWORD: reportcreator + POSTGRES_DB: reportcreator + PGDATA: /data + volumes: + - type: volume + source: db-data + target: /data + expose: + - 5432 + healthcheck: + test: ["CMD-SHELL", "pg_isready -U reportcreator"] + interval: 2s + timeout: 5s + retries: 30 + restart: always + stop_grace_period: 120s + app: + build: + context: ../ + target: api + args: + VERSION: ${SYSREPTOR_VERSION} + container_name: 'sysreptor-app' + init: true + volumes: + - type: volume + source: app-data + target: /data + expose: + - 8000 + ports: + - "127.0.0.1:8000:8000" + environment: + DATABASE_HOST: db + DATABASE_NAME: reportcreator + DATABASE_USER: reportcreator + DATABASE_PASSWORD: reportcreator + env_file: app.env + restart: always + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8000/api/v1/utils/healthcheck/"] + interval: 30s + timeout: 30s + retries: 5 + start_period: 10s + depends_on: + db: + condition: service_healthy + + +volumes: + db-data: + name: sysreptor-db-data + external: true + app-data: + name: sysreptor-app-data + external: true diff --git a/deploy/sysreptor.nginx b/deploy/sysreptor.nginx new file mode 100644 index 
0000000..dcc8354 --- /dev/null +++ b/deploy/sysreptor.nginx @@ -0,0 +1,31 @@ +server { + listen 80 default_server; + server_name _; + # redirect all HTTP to HTTPS + return 301 https://$host$request_uri; +} + +server { + listen 443 ssl default_server; + server_name _; + + ssl_certificate /etc/ssl/certs/ssl-cert-snakeoil.pem; + ssl_certificate_key /etc/ssl/private/ssl-cert-snakeoil.key; + ssl_session_timeout 1d; + ssl_session_cache shared:MozSSL:10m; # about 40000 sessions + ssl_session_tickets off; + ssl_protocols TLSv1.3; + ssl_prefer_server_ciphers off; + + # HSTS (ngx_http_headers_module is required) (63072000 seconds) + # Uncomment if desired + #add_header Strict-Transport-Security "max-age=63072000" always; + + ssl_stapling on; + ssl_stapling_verify on; + + location / { + include proxy_params; + proxy_pass http://127.0.0.1:8000; + } +} diff --git a/frontend/.babelrc b/frontend/.babelrc new file mode 100644 index 0000000..decdae6 --- /dev/null +++ b/frontend/.babelrc @@ -0,0 +1,17 @@ +{ + "env": { + "test": { + "presets": [ + [ + "@babel/preset-env", + { + "targets": { + "node": "current" + } + } + ] + ], + "plugins": ["@babel/plugin-transform-runtime"] + } + } +} diff --git a/frontend/.editorconfig b/frontend/.editorconfig new file mode 100644 index 0000000..5d12634 --- /dev/null +++ b/frontend/.editorconfig @@ -0,0 +1,13 @@ +# editorconfig.org +root = true + +[*] +indent_style = space +indent_size = 2 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +trim_trailing_whitespace = false diff --git a/frontend/.eslintrc.js b/frontend/.eslintrc.js new file mode 100644 index 0000000..d5c1488 --- /dev/null +++ b/frontend/.eslintrc.js @@ -0,0 +1,41 @@ +/* eslint-disable quote-props */ +module.exports = { + root: true, + env: { + browser: true, + node: true + }, + parserOptions: { + parser: '@babel/eslint-parser', + requireConfigFile: false + }, + extends: [ + '@nuxtjs', + 'plugin:nuxt/recommended' + ], + plugins: [ + ], + // add your custom rules here + rules: { + "comma-dangle": "off", + "semi": "off", + "vue/multi-word-component-names": "off", + "space-before-function-paren": "off", + "vue/singleline-html-element-content-newline": "off", + "no-trailing-spaces": "off", + "vue/max-attributes-per-line": "off", + "vue/attributes-order": "off", + "multiline-ternary": "off", + "operator-linebreak": "off", + "quotes": "off", + "vue/html-self-closing": "off", + "vue/valid-v-slot": "off", + + "no-unused-vars": "warn", + "vue/no-unused-components": "warn", + "eol-last": "warn", + "no-multiple-empty-lines": "warn", + "object-curly-spacing": "warn", + "prefer-const": "warn", + } +} diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..c7dec45 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,93 @@ +# Created by .ignore support plugin (hsz.mobi) +### Node template +# Logs +/logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# 
TypeScript v1 declaration files +typings/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# dotenv environment variables file +.env + +# parcel-bundler cache (https://parceljs.org/) +.cache + +# next.js build output +.next + +# nuxt.js build output +.nuxt + +# Nuxt generate +dist + +# vuepress build output +.vuepress/dist + +# Serverless directories +.serverless + +# IDE / Editor +.idea + +# Service worker +sw.* + +# macOS +.DS_Store + +# Vim swap files +*.swp + +# Unit test output +test-reports diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..84dbd9b --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,69 @@ +# reportcreator-frontend + +## Build Setup + +```bash +# install dependencies +$ npm install + +# serve with hot reload at localhost:3000 +$ npm run dev + +# build for production and launch server +$ npm run build +$ npm run start + +# generate static project +$ npm run generate +``` + +For detailed explanation on how things work, check out the [documentation](https://nuxtjs.org). + +## Special Directories + +You can create the following extra directories, some of which have special behaviors. Only `pages` is required; you can delete them if you don't want to use their functionality. + +### `assets` + +The assets directory contains your uncompiled assets such as Stylus or Sass files, images, or fonts. + +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/directory-structure/assets). + +### `components` + +The components directory contains your Vue.js components. Components make up the different parts of your page and can be reused and imported into your pages, layouts and even other components. + +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/directory-structure/components). + +### `layouts` + +Layouts are a great help when you want to change the look and feel of your Nuxt app, whether you want to include a sidebar or have distinct layouts for mobile and desktop. + +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/directory-structure/layouts). + + +### `pages` + +This directory contains your application views and routes. Nuxt will read all the `*.vue` files inside this directory and setup Vue Router automatically. + +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/get-started/routing). + +### `plugins` + +The plugins directory contains JavaScript plugins that you want to run before instantiating the root Vue.js Application. This is the place to add Vue plugins and to inject functions or constants. Every time you need to use `Vue.use()`, you should create a file in `plugins/` and add its path to plugins in `nuxt.config.js`. + +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/directory-structure/plugins). + +### `static` + +This directory contains your static files. Each file inside this directory is mapped to `/`. + +Example: `/static/robots.txt` is mapped as `/robots.txt`. + +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/directory-structure/static). + +### `store` + +This directory contains your Vuex store files. Creating a file in this directory automatically activates Vuex. 
+ +More information about the usage of this directory in [the documentation](https://nuxtjs.org/docs/2.x/directory-structure/store). diff --git a/frontend/assets/emojis/sheet_twitter_32_indexed_128.png b/frontend/assets/emojis/sheet_twitter_32_indexed_128.png new file mode 100644 index 0000000..24a3df7 Binary files /dev/null and b/frontend/assets/emojis/sheet_twitter_32_indexed_128.png differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-100.woff b/frontend/assets/fonts/exo/exo-v20-latin-100.woff new file mode 100644 index 0000000..0b9a22d Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-100.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-100.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-100.woff2 new file mode 100644 index 0000000..8c2d09d Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-100.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-100italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-100italic.woff new file mode 100644 index 0000000..f9f84f3 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-100italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-100italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-100italic.woff2 new file mode 100644 index 0000000..bb609e0 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-100italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-200.woff b/frontend/assets/fonts/exo/exo-v20-latin-200.woff new file mode 100644 index 0000000..9afc210 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-200.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-200.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-200.woff2 new file mode 100644 index 0000000..c19e541 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-200.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-200italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-200italic.woff new file mode 100644 index 0000000..4f3a254 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-200italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-200italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-200italic.woff2 new file mode 100644 index 0000000..ca12e13 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-200italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-300.woff b/frontend/assets/fonts/exo/exo-v20-latin-300.woff new file mode 100644 index 0000000..e13a754 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-300.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-300.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-300.woff2 new file mode 100644 index 0000000..e3037d3 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-300.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-300italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-300italic.woff new file mode 100644 index 0000000..1e60e92 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-300italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-300italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-300italic.woff2 new file mode 100644 index 0000000..350edc6 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-300italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-500.woff b/frontend/assets/fonts/exo/exo-v20-latin-500.woff new file mode 
100644 index 0000000..82729fe Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-500.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-500.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-500.woff2 new file mode 100644 index 0000000..16d3b56 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-500.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-500italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-500italic.woff new file mode 100644 index 0000000..ec53b16 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-500italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-500italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-500italic.woff2 new file mode 100644 index 0000000..dd86d56 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-500italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-600.woff b/frontend/assets/fonts/exo/exo-v20-latin-600.woff new file mode 100644 index 0000000..dc6cc6d Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-600.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-600.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-600.woff2 new file mode 100644 index 0000000..1527dca Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-600.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-600italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-600italic.woff new file mode 100644 index 0000000..e74917d Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-600italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-600italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-600italic.woff2 new file mode 100644 index 0000000..b87577d Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-600italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-700.woff b/frontend/assets/fonts/exo/exo-v20-latin-700.woff new file mode 100644 index 0000000..54ac1e0 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-700.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-700.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-700.woff2 new file mode 100644 index 0000000..275ce68 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-700.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-700italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-700italic.woff new file mode 100644 index 0000000..1672127 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-700italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-700italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-700italic.woff2 new file mode 100644 index 0000000..61f8408 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-700italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-800.woff b/frontend/assets/fonts/exo/exo-v20-latin-800.woff new file mode 100644 index 0000000..047ae9a Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-800.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-800.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-800.woff2 new file mode 100644 index 0000000..6cb44f8 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-800.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-800italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-800italic.woff new file mode 100644 index 
0000000..e960e4f Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-800italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-800italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-800italic.woff2 new file mode 100644 index 0000000..9eefec6 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-800italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-900.woff b/frontend/assets/fonts/exo/exo-v20-latin-900.woff new file mode 100644 index 0000000..e7ccdea Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-900.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-900.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-900.woff2 new file mode 100644 index 0000000..80b2b52 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-900.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-900italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-900italic.woff new file mode 100644 index 0000000..0b42f69 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-900italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-900italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-900italic.woff2 new file mode 100644 index 0000000..f8deff6 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-900italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-italic.woff b/frontend/assets/fonts/exo/exo-v20-latin-italic.woff new file mode 100644 index 0000000..75ec87e Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-italic.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-italic.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-italic.woff2 new file mode 100644 index 0000000..aef9299 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-italic.woff2 differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-regular.woff b/frontend/assets/fonts/exo/exo-v20-latin-regular.woff new file mode 100644 index 0000000..ce6c1da Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-regular.woff differ diff --git a/frontend/assets/fonts/exo/exo-v20-latin-regular.woff2 b/frontend/assets/fonts/exo/exo-v20-latin-regular.woff2 new file mode 100644 index 0000000..0b81af6 Binary files /dev/null and b/frontend/assets/fonts/exo/exo-v20-latin-regular.woff2 differ diff --git a/frontend/assets/fonts/exo/exo.css b/frontend/assets/fonts/exo/exo.css new file mode 100644 index 0000000..7369f8a --- /dev/null +++ b/frontend/assets/fonts/exo/exo.css @@ -0,0 +1,162 @@ +/* exo-100 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 100; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-100.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-100.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-200 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 200; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-200.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-200.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-300 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 300; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-300.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + 
url('~/assets/fonts/exo/exo-v20-latin-300.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-regular - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 400; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-regular.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-regular.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-500 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 500; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-500.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-500.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-600 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 600; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-600.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-600.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-700 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 700; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-700.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-700.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-800 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 800; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-800.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-800.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-900 - latin */ +@font-face { + font-family: 'Exo'; + font-style: normal; + font-weight: 900; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-900.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-900.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-100italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 100; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-100italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-100italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-200italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 200; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-200italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-200italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-300italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 300; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-300italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-300italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 400; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 
39+ */ + url('~/assets/fonts/exo/exo-v20-latin-italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-500italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 500; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-500italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-500italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-600italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 600; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-600italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-600italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-700italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 700; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-700italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-700italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-800italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 800; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-800italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-800italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} +/* exo-900italic - latin */ +@font-face { + font-family: 'Exo'; + font-style: italic; + font-weight: 900; + src: local(''), + url('~/assets/fonts/exo/exo-v20-latin-900italic.woff2') format('woff2'), /* Chrome 26+, Opera 23+, Firefox 39+ */ + url('~/assets/fonts/exo/exo-v20-latin-900italic.woff') format('woff'); /* Chrome 6+, Firefox 3.6+, IE 9+, Safari 5.1+ */ +} \ No newline at end of file diff --git a/frontend/assets/global.scss b/frontend/assets/global.scss new file mode 100644 index 0000000..105afc3 --- /dev/null +++ b/frontend/assets/global.scss @@ -0,0 +1,19 @@ +@import '~/assets/fonts/exo/exo.css'; +@import '@mdi/font/css/materialdesignicons.css'; + + +// Hide scrollbar when it is not needed +// Overrides default Vuetify behavior +// See https://github.com/vuetifyjs/vuetify/issues/1197 +html { + overflow-y: auto !important; +} + +body { + font-family: $body-font-family; + line-height: $line-height-root; +} + +.login-header { + background-color: $syslifters-darkblue !important; +} diff --git a/frontend/assets/mde-highlight.scss b/frontend/assets/mde-highlight.scss new file mode 100644 index 0000000..a6a342f --- /dev/null +++ b/frontend/assets/mde-highlight.scss @@ -0,0 +1,65 @@ +.cm-focused { + outline: none !important; +} + +/* inline markdown styles */ +.cm-content { + font-family: $body-font-family; + + .tok-h1, .tok-h2, .tok-h3, .tok-h4, .tok-h5, .tok-h6 { + font-weight: bold; + } + .tok-h1 { font-size: 2em; } + .tok-h2 { font-size: 1.75em; } + .tok-h3 { font-size: 1.5em; } + .tok-h4 { font-size: 1.25em; } + .tok-h5 { font-size: 1.1em; } + .tok-h6 { font-size: 1em; } + + .tok-strong { font-weight: bold } + .tok-emphasis { font-style: italic; } + .tok-strikethrough { text-decoration: line-through;} + + .tok-codeblock, .tok-inlinecode { + font-family: monospace; + background-color: map-deep-get($material-light, 'code', 'background'); + color: map-deep-get($material-light, 'code', 'color'); + } + + .tok-table { + 
font-family: monospace; + } + + .tok-footnote { + vertical-align: super; + font-size: 0.9em; + } + + .tok-link, .tok-image, .tok-footnote { + color: #7f8c8d; + } + .tok-url { + color: #aab2b3; + text-decoration: underline; + } + + .tok-quote { + color: #7f8c8d; + } + + .tok-todo { + background-color: $risk-color-critical; + color: white; + padding-left: 0.2em; + padding-right: 0.2em; + border-radius: 10%; + } + + // HTML tag highlighting + .tok-tagname, .tok-anglebracket, .tok-attributename, .tok-attributevalue, .tok-comment { font-family: monospace; } + .tok-tagname, .tok-anglebracket { color: #085; } + .tok-attributename { color: #795da3; } + .tok-attributevalue { color: #a11; } + .tok-comment { color: #940; } + +} \ No newline at end of file diff --git a/frontend/assets/splitpanes.scss b/frontend/assets/splitpanes.scss new file mode 100644 index 0000000..4f1c700 --- /dev/null +++ b/frontend/assets/splitpanes.scss @@ -0,0 +1,76 @@ +.splitpanes { + display: flex; + width: 100%; + height: 100%; + + &--vertical {flex-direction: row;} + &--horizontal {flex-direction: column;} + &--dragging * {user-select: none;} + + &__pane { + width: 100%; + height: 100%; + overflow: hidden; + // .splitpanes--vertical & {transition: width 0.2s ease-out;} + // .splitpanes--horizontal & {transition: height 0.2s ease-out;} + // .splitpanes--dragging & {transition: none;} + } + + // Disable default zoom behavior on touch device when double tapping splitter. + &__splitter {touch-action: none;} + &--vertical > .splitpanes__splitter {min-width: 1px;cursor: col-resize;} + &--horizontal > .splitpanes__splitter {min-height: 1px;cursor: row-resize;} +} + +.splitpanes.default-theme { + .splitpanes__splitter { + background-color: #fff; + box-sizing: border-box; + position: relative; + flex-shrink: 0; + &:before, &:after { + content: ""; + position: absolute; + top: 50%; + left: 50%; + background-color: rgba(0, 0, 0, .15); + transition: background-color 0.3s; + } + &:hover:before, &:hover:after {background-color: rgba(0, 0, 0, .25);} + &:first-child {cursor: auto;} + } +} + +.default-theme { + &.splitpanes .splitpanes .splitpanes__splitter { + z-index: 1; + } + &.splitpanes--vertical > .splitpanes__splitter, + .splitpanes--vertical > .splitpanes__splitter { + width: 7px; + border-left: 1px solid #eee; + border-right: 1px solid #eee; + margin-left: -1px; + &:before, &:after { + transform: translateY(-50%); + width: 1px; + height: 30px; + } + &:before {margin-left: -2px;} + &:after {margin-left: 1px;} + } + &.splitpanes--horizontal > .splitpanes__splitter, + .splitpanes--horizontal > .splitpanes__splitter { + height: 7px; + border-top: 1px solid #eee; + margin-top: -1px; + &:before, + &:after { + transform: translateX(-50%); + width: 30px; + height: 1px; + } + &:before {margin-top: -2px;} + &:after {margin-top: 1px;} + } +} diff --git a/frontend/assets/variables.scss b/frontend/assets/variables.scss new file mode 100644 index 0000000..6f8a724 --- /dev/null +++ b/frontend/assets/variables.scss @@ -0,0 +1,33 @@ +// Ref: https://github.com/nuxt-community/vuetify-module#customvariables +// +// The variables you want to modify +// $font-size-root: 20px; +$body-font-family: 'Exo'; + +@import '~vuetify/src/styles/styles.sass'; + + +// Colors +$risk-color-info: #14AE92; +$risk-color-low: #4285F5; +$risk-color-medium: #FBBC05; +$risk-color-high: #F76226; +$risk-color-critical: #E83221; + +$risk-color-levels: ( + 1: $risk-color-info, + 2: $risk-color-low, + 3: $risk-color-medium, + 4: $risk-color-high, + 5: $risk-color-critical +); 
+
+$syslifters-darkblue: #001827;
+$syslifters-lightblue: #0098DB;
+$syslifters-orange: #E65E00;
+$syslifters-white: #F9FDFF;
+$syslifters-black: #262626;
+
+$status-color-finished: map-get($green, 'base');
+
+
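Illustrative aside, not part of the commit: the $risk-color-levels map defined in frontend/assets/variables.scss above lends itself to generating one class per severity level. A minimal SCSS sketch, assuming variables.scss has been imported and using hypothetical .risk-level-* class names:

// Hypothetical consumer of $risk-color-levels -- emits .risk-level-1 through .risk-level-5.
@each $level, $color in $risk-color-levels {
  .risk-level-#{$level} {
    background-color: $color;  // e.g. $risk-color-critical for level 5
    color: white;
  }
}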
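Similarly hedged, not part of the commit: because frontend/assets/splitpanes.scss ships the library's default theme, a stylesheet loaded after it could tweak individual splitter properties without editing the vendored file. A sketch, with an arbitrary 11px width:

// Hypothetical override of the default splitpanes theme: widen vertical splitters.
// Selector copied from splitpanes.scss; must be loaded after it to take effect.
.default-theme.splitpanes--vertical > .splitpanes__splitter {
  width: 11px;
}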
diff --git a/frontend/components/AssetManager.vue b/frontend/components/AssetManager.vue new file mode 100644 index 0000000..e9dc8a1 --- /dev/null +++ b/frontend/components/AssetManager.vue @@ -0,0 +1,183 @@
diff --git a/frontend/components/Btn/Confirm.vue b/frontend/components/Btn/Confirm.vue new file mode 100644 index 0000000..e50bf76 --- /dev/null +++ b/frontend/components/Btn/Confirm.vue @@ -0,0 +1,144 @@
diff --git a/frontend/components/Btn/Copy.vue b/frontend/components/Btn/Copy.vue new file mode 100644 index 0000000..1b3d40e --- /dev/null +++ b/frontend/components/Btn/Copy.vue @@ -0,0 +1,44 @@
diff --git a/frontend/components/Btn/Delete.vue b/frontend/components/Btn/Delete.vue new file mode 100644 index 0000000..670c280 --- /dev/null +++ b/frontend/components/Btn/Delete.vue @@ -0,0 +1,38 @@
diff --git a/frontend/components/Btn/Export.vue b/frontend/components/Btn/Export.vue new file mode 100644 index 0000000..70b4c30 --- /dev/null +++ b/frontend/components/Btn/Export.vue @@ -0,0 +1,84 @@
diff --git a/frontend/components/Btn/Import.vue b/frontend/components/Btn/Import.vue new file mode 100644 index 0000000..31d39cc --- /dev/null +++ b/frontend/components/Btn/Import.vue @@ -0,0 +1,76 @@
diff --git a/frontend/components/Btn/Readonly.vue b/frontend/components/Btn/Readonly.vue new file mode 100644 index 0000000..2123429 --- /dev/null +++ b/frontend/components/Btn/Readonly.vue @@ -0,0 +1,35 @@
diff --git a/frontend/components/CodeEditor.vue b/frontend/components/CodeEditor.vue new file mode 100644 index 0000000..7a1881c --- /dev/null +++ b/frontend/components/CodeEditor.vue @@ -0,0 +1,92 @@
diff --git a/frontend/components/CreateDesignDialog.vue b/frontend/components/CreateDesignDialog.vue new file mode 100644 index 0000000..e029731 --- /dev/null +++ b/frontend/components/CreateDesignDialog.vue @@ -0,0 +1,87 @@
diff --git a/frontend/components/CreateFindingDialog.vue b/frontend/components/CreateFindingDialog.vue new file mode 100644 index 0000000..bbe8931 --- /dev/null +++ b/frontend/components/CreateFindingDialog.vue @@ -0,0 +1,163 @@
diff --git a/frontend/components/CvssChip.vue b/frontend/components/CvssChip.vue new file mode 100644 index 0000000..74dba2a --- /dev/null +++ b/frontend/components/CvssChip.vue @@ -0,0 +1,56 @@
diff --git a/frontend/components/CvssField.vue b/frontend/components/CvssField.vue new file mode 100644 index 0000000..09aa74f --- /dev/null +++ b/frontend/components/CvssField.vue @@ -0,0 +1,225 @@
diff --git a/frontend/components/CvssMetricInput.vue b/frontend/components/CvssMetricInput.vue new file mode 100644 index 0000000..aba6311 --- /dev/null +++ b/frontend/components/CvssMetricInput.vue @@ -0,0 +1,65 @@
diff --git a/frontend/components/DynamicInputField.vue b/frontend/components/DynamicInputField.vue new file mode 100644 index 0000000..b25c4b3 --- /dev/null +++ b/frontend/components/DynamicInputField.vue @@ -0,0 +1,328 @@
diff --git a/frontend/components/EditToolbar.vue b/frontend/components/EditToolbar.vue new file mode 100644 index 0000000..69732bb --- /dev/null +++ b/frontend/components/EditToolbar.vue @@ -0,0 +1,437 @@
diff --git a/frontend/components/ErrorList.vue b/frontend/components/ErrorList.vue new file mode 100644 index 0000000..8266d45 --- /dev/null +++ b/frontend/components/ErrorList.vue @@ -0,0 +1,139 @@
diff --git a/frontend/components/ExportButton.vue b/frontend/components/ExportButton.vue new file mode 100644 index 0000000..82ceeb5 --- /dev/null +++ b/frontend/components/ExportButton.vue @@ -0,0 +1,53 @@
diff --git a/frontend/components/FetchLoader.vue b/frontend/components/FetchLoader.vue new file mode 100644 index 0000000..ecdbdd2 --- /dev/null +++ b/frontend/components/FetchLoader.vue @@ -0,0 +1,34 @@
diff --git a/frontend/components/FillScreenHeight.vue b/frontend/components/FillScreenHeight.vue new file mode 100644 index 0000000..5dd7462 --- /dev/null +++ b/frontend/components/FillScreenHeight.vue @@ -0,0 +1,43 @@
diff --git a/frontend/components/InputFieldDefinition.vue b/frontend/components/InputFieldDefinition.vue new file mode 100644 index 0000000..d0a09fc --- /dev/null +++ b/frontend/components/InputFieldDefinition.vue @@ -0,0 +1,350 @@
diff --git a/frontend/components/LanguageChip.vue b/frontend/components/LanguageChip.vue new file mode 100644 index 0000000..0be7346 --- /dev/null +++ b/frontend/components/LanguageChip.vue @@ -0,0 +1,21 @@
diff --git a/frontend/components/LanguageSelection.vue b/frontend/components/LanguageSelection.vue new file mode 100644 index 0000000..bbfdc16 --- /dev/null +++ b/frontend/components/LanguageSelection.vue @@ -0,0 +1,27 @@
diff --git a/frontend/components/ListView.vue b/frontend/components/ListView.vue new file mode 100644 index 0000000..a0ea4f5 --- /dev/null +++ b/frontend/components/ListView.vue @@ -0,0 +1,58 @@
diff --git a/frontend/components/LockInfo.vue b/frontend/components/LockInfo.vue new file mode 100644 index 0000000..3971b19 --- /dev/null +++ b/frontend/components/LockInfo.vue @@ -0,0 +1,29 @@
diff --git a/frontend/components/LoginForm.vue b/frontend/components/LoginForm.vue new file mode 100644 index 0000000..93aaa88 --- /dev/null +++ b/frontend/components/LoginForm.vue @@ -0,0 +1,221 @@
diff --git a/frontend/components/LoginOidcForm.vue b/frontend/components/LoginOidcForm.vue new file mode 100644 index 0000000..2b3a1af --- /dev/null +++ b/frontend/components/LoginOidcForm.vue @@ -0,0 +1,47 @@
diff --git a/frontend/components/Markdown/Field.vue b/frontend/components/Markdown/Field.vue new file mode 100644 index 0000000..ac0a635 --- /dev/null +++ b/frontend/components/Markdown/Field.vue @@ -0,0 +1,79 @@
diff --git a/frontend/components/Markdown/FieldContent.vue b/frontend/components/Markdown/FieldContent.vue new file mode 100644 index 0000000..ede3947 --- /dev/null +++ b/frontend/components/Markdown/FieldContent.vue @@ -0,0 +1,77 @@
diff --git a/frontend/components/Markdown/Page.vue b/frontend/components/Markdown/Page.vue new file mode 100644 index 0000000..e16759c --- /dev/null +++ b/frontend/components/Markdown/Page.vue @@ -0,0 +1,91 @@
diff --git a/frontend/components/Markdown/Preview.vue b/frontend/components/Markdown/Preview.vue new file mode 100644 index 0000000..397e496 --- /dev/null +++ b/frontend/components/Markdown/Preview.vue @@ -0,0 +1,232 @@
diff --git a/frontend/components/Markdown/Statusbar.vue b/frontend/components/Markdown/Statusbar.vue new file mode 100644 index 0000000..308449b --- /dev/null +++ b/frontend/components/Markdown/Statusbar.vue @@ -0,0 +1,72 @@
diff --git a/frontend/components/Markdown/TextField.vue b/frontend/components/Markdown/TextField.vue new file mode 100644 index 0000000..29b2418 --- /dev/null +++ b/frontend/components/Markdown/TextField.vue @@ -0,0 +1,69 @@
diff --git a/frontend/components/Markdown/TextFieldContent.vue b/frontend/components/Markdown/TextFieldContent.vue new file mode 100644 index 0000000..06197c3 --- /dev/null +++ b/frontend/components/Markdown/TextFieldContent.vue @@ -0,0 +1,182 @@
diff --git a/frontend/components/Markdown/Toolbar.vue b/frontend/components/Markdown/Toolbar.vue new file mode 100644 index 0000000..0110325 --- /dev/null +++ b/frontend/components/Markdown/Toolbar.vue @@ -0,0 +1,143 @@
diff --git a/frontend/components/Markdown/ToolbarButton.vue b/frontend/components/Markdown/ToolbarButton.vue new file mode 100644 index 0000000..8781c1b --- /dev/null +++ b/frontend/components/Markdown/ToolbarButton.vue @@ -0,0 +1,39 @@
diff --git a/frontend/components/MemberSelection.vue b/frontend/components/MemberSelection.vue new file mode 100644 index 0000000..95fe297 --- /dev/null +++ b/frontend/components/MemberSelection.vue @@ -0,0 +1,190 @@
diff --git a/frontend/components/NotesSortableList.vue b/frontend/components/NotesSortableList.vue new file mode 100644 index 0000000..d0c4972 --- /dev/null +++ b/frontend/components/NotesSortableList.vue @@ -0,0 +1,153 @@
diff --git a/frontend/components/NotificationMenuItem.vue b/frontend/components/NotificationMenuItem.vue new file mode 100644 index 0000000..16cbff9 --- /dev/null +++ b/frontend/components/NotificationMenuItem.vue @@ -0,0 +1,90 @@
diff --git a/frontend/components/PageLoader.vue b/frontend/components/PageLoader.vue new file mode 100644 index 0000000..21325f1 --- /dev/null +++ b/frontend/components/PageLoader.vue @@ -0,0 +1,47 @@
diff --git a/frontend/components/Pdf.vue b/frontend/components/Pdf.vue new file mode 100644 index 0000000..b961aae --- /dev/null +++ b/frontend/components/Pdf.vue @@ -0,0 +1,48 @@ +