diff --git a/.github/workflows/docker_build_test.yml b/.github/workflows/docker_build_test.yml new file mode 100644 index 0000000..d07d0c2 --- /dev/null +++ b/.github/workflows/docker_build_test.yml @@ -0,0 +1,29 @@ +name: Build and Test Docker Image + +on: + push: + +env: + TEST_TAG: papermountain/gitlab-watchman:test + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build + uses: docker/build-push-action@v6 + with: + load: true + tags: ${{ env.TEST_TAG }} + + - name: Inspect + run: | + docker image inspect ${{ env.TEST_TAG }} + + - name: Test + run: | + docker run --rm ${{ env.TEST_TAG }} --version + docker run --rm ${{ env.TEST_TAG }} --help \ No newline at end of file diff --git a/.github/workflows/dockerpublish.yml b/.github/workflows/docker_publish.yml similarity index 91% rename from .github/workflows/dockerpublish.yml rename to .github/workflows/docker_publish.yml index 2f29e68..994305a 100644 --- a/.github/workflows/dockerpublish.yml +++ b/.github/workflows/docker_publish.yml @@ -1,8 +1,8 @@ -name: ci +name: Publish Docker Image on: push: - branches: [ master ] + branches: [ master, main ] jobs: build: diff --git a/.github/workflows/python_package.yml b/.github/workflows/python_package.yml new file mode 100644 index 0000000..773d32b --- /dev/null +++ b/.github/workflows/python_package.yml @@ -0,0 +1,34 @@ +name: Test Python Package + +on: + push: + branches: [ develop, feature/**, release/**, hotfix/** ] + pull_request: + branches: [ develop, feature/**, release/**, hotfix/** ] + +jobs: + build-ubuntu: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + pip install poetry + poetry install + - name: Test 
setup & install + run: | + poetry build + python3 -m pip install dist/*.whl + - name: Test run + run: | + gitlab-watchman --version + gitlab-watchman --help \ No newline at end of file diff --git a/.github/workflows/python_publish.yml b/.github/workflows/python_publish.yml new file mode 100644 index 0000000..ee88dff --- /dev/null +++ b/.github/workflows/python_publish.yml @@ -0,0 +1,24 @@ +name: Poetry Publish + +on: + push: + branches: [ master, main ] + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.12' + - name: Install dependencies + run: | + pip install poetry + poetry install + poetry config pypi-token.pypi "${{ secrets.PYPI_TOKEN }}" + - name: Publish package + run: poetry publish --build \ No newline at end of file diff --git a/.github/workflows/python_run_tests.yml b/.github/workflows/python_run_tests.yml new file mode 100644 index 0000000..a3d8ca8 --- /dev/null +++ b/.github/workflows/python_run_tests.yml @@ -0,0 +1,33 @@ +name: Run Unit Test via Pytest + +on: + push: + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.10", "3.11", "3.12", "3.13"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install poetry + poetry install --with dev + - name: Analysing the code with pylint + run: | + poetry run pylint $(git ls-files '*.py') + continue-on-error: true + - name: Test with pytest + run: | + poetry run coverage run -m pytest -v -s + - name: Generate Coverage Report + run: | + poetry run coverage report -m \ No newline at end of file diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml deleted file mode 100644 index d102b17..0000000 --- 
a/.github/workflows/pythonpackage.yml +++ /dev/null @@ -1,46 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - -on: - push: - branches: [ develop ] - pull_request: - branches: [ develop ] - -jobs: - build-ubuntu: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.10'] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 - if [ -f requirements.txt ]; then pip install -r requirements.txt; fi - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Test setup & install - run: | - pip install build twine - python3 -m build - twine check dist/*.whl - python3 -m pip install dist/*.whl - - name: Test run - run: | - gitlab-watchman --version - gitlab-watchman --help diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml deleted file mode 100644 index 119e458..0000000 --- a/.github/workflows/pythonpublish.yml +++ /dev/null @@ -1,30 +0,0 @@ -# This workflows will upload a Python Package using Twine when a release is created -# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries - -name: Upload Python Package - -on: - push: - branches: [ master ] - -jobs: - deploy: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 - with: - python-version: '3.x' - - name: Install dependencies - run: | - python3 -m pip install --upgrade pip twine build - - name: Build and publish - env: - TWINE_USERNAME: ${{ '__token__' }} - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} - run: | - python3 -m build - twine upload dist/* diff --git a/CHANGELOG.md b/CHANGELOG.md index d324abc..60877d5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,28 @@ +## [3.1.0] - 2024-11-18 +### Added +- Signatures now loaded into memory instead of being saved to disk. This allows for running on read-only filesystems. +- Ability to disable signatures by their ID in the watchman.conf config file. + - These signatures will not be used when running GitLab Watchman + - Signature IDs for each signature can be found in the Watchman Signatures repository +- Tests for Docker build +- Enhanced deduplication of findings + - The same match should not be returned multiple times within the same scope. E.g. 
if a token is found in a commit, it should not be returned multiple times in the same commit. +- All dates are now converted and logged in UTC +- Unit tests added for models and utils + +### Changed +- Package management and deployment moved to Poetry +- Docker build process improved using multi-stage builds. The Dockerfile now doesn't contain any unnecessary files, and is much smaller. +- Refactor to separate GitLab client and Watchman processing into modules +- Refactor to implement [python-gitlab](https://python-gitlab.readthedocs.io/) library for GitLab API calls, instead of the custom client used previously. + - This change gives more efficient and easier to read code, is more reliable, and also allows for enhancements to be added more easily in the future. + +### Fixed +- Error when searching wiki-blobs + - There would often be failures when trying to find projects or groups associated with blobs. This is now fixed by adding logic to check if the blob is associated with a project or group, and get the correct information accordingly. +- URL encoding for wiki-blobs where the URL contains special characters +- Error when enumerating pages when there is no `X-Total-Pages` header + ## [3.0.0] - 2023-05-15 This major version release brings multiple updates to GitLab Watchman in usability, functionality and behind the scenes improvements. ### Added diff --git a/Dockerfile b/Dockerfile index 0e38712..c27ccc9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,20 @@ # syntax=docker/dockerfile:1 +FROM python:3.12-slim-bullseye AS builder +WORKDIR /opt/gitlab-watchman +COPY . . +RUN pip install poetry +RUN poetry config virtualenvs.create false && \ + poetry install --no-dev && \ + poetry build -FROM python:3.10 -COPY . /opt/gitlab-watchman +FROM python:3.12-slim-bullseye WORKDIR /opt/gitlab-watchman -ENV PYTHONPATH=/opt/gitlab-watchman GITLAB_WATCHMAN_TOKEN="" GITLAB_WATCHMAN_URL="" -RUN pip3 install -r requirements.txt build && \ - chmod -R 700 . 
&& \ - python3 -m build && \ - python3 -m pip install dist/*.whl +COPY --from=builder /opt/gitlab-watchman/dist/*.whl /opt/gitlab-watchman/dist/ +COPY --from=builder /opt/gitlab-watchman/pyproject.toml /opt/gitlab-watchman/poetry.lock /opt/gitlab-watchman/ +ENV PYTHONPATH=/opt/gitlab-watchman \ + GITLAB_WATCHMAN_TOKEN="" \ + GITLAB_WATCHMAN_URL="" +RUN pip install dist/*.whl && \ + chmod -R 700 . STOPSIGNAL SIGINT -WORKDIR /opt/gitlab-watchman ENTRYPOINT ["gitlab-watchman"] \ No newline at end of file diff --git a/README.md b/README.md index 02af30f..91c508d 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,18 @@ GitLab Watchman can enumerate potentially useful information from a GitLab insta ### Signatures GitLab Watchman uses custom YAML signatures to detect matches in GitLab. These signatures are pulled from the central [Watchman Signatures repository](https://github.com/PaperMtn/watchman-signatures). Slack Watchman automatically updates its signature base at runtime to ensure its using the latest signatures to detect secrets. +#### Suppressing Signatures +You can define signatures that you want to disable when running GitLab Watchman by adding their IDs to the `disabled_signatures` section of the `watchman.conf` file. For example: + +```yaml +gitlab_watchman: + disabled_signatures: + - tokens_generic_bearer_tokens + - tokens_generic_access_tokens +``` + +You can find the ID of a signature in the individual YAML files in [Watchman Signatures repository](https://github.com/PaperMtn/watchman-signatures). + ### Logging GitLab Watchman gives the following logging options: @@ -106,6 +118,16 @@ You also need to provide the URL of your GitLab instance. #### Providing token & URL GitLab Watchman will get the GitLab token and URL from the environment variables `GITLAB_WATCHMAN_TOKEN` and `GITLAB_WATCHMAN_URL`. +### watchman.conf file +Configuration options can be passed in a file named `watchman.conf` which must be stored in your home directory. 
The file should follow the YAML format, and should look like below: +```yaml +gitlab_watchman: + disabled_signatures: + - tokens_generic_bearer_tokens + - tokens_generic_access_tokens +``` +GitLab Watchman will look for this file at runtime, and use the configuration options from here. + ## Installation You can install the latest stable version via pip: diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..31ecab8 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,580 @@ +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. + +[[package]] +name = "astroid" +version = "3.3.5" +description = "An abstract syntax tree for Python with inference support." +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, + {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.6.4" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = 
"coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = 
"sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name 
= "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pylint" +version = "3.3.1" +description = "python code static checker" +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, + {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, +] + +[package.dependencies] +astroid = ">=3.3.4,<=3.4.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", markers = 
"python_version < \"3.11\""} +tomlkit = ">=0.10.1" + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-gitlab" +version = "5.0.0" +description = "A python wrapper for the GitLab API" +optional = false +python-versions = ">=3.9.0" +files = [ + {file = "python_gitlab-5.0.0-py3-none-any.whl", hash = "sha256:2af86a1655385c2afe13e33e79555d5394f8c7912ade04bb6e916e3d21a8716f"}, + {file = "python_gitlab-5.0.0.tar.gz", hash = "sha256:d156b9810d2a5c2916226cecf354956286a9e8133ee707d6584e40f126329956"}, +] + +[package.dependencies] +requests = ">=2.32.0" +requests-toolbelt = ">=1.0.0" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +graphql = ["gql[httpx] (>=3.5.0,<4)"] +yaml = ["PyYaml (>=6.0.1)"] + +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = 
"sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = 
"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "tomli" +version = "2.1.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, + {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.10" +content-hash = "c6cda1a7c9bd412030e28b547095fbbafb515d81f7204ed41d4167e46d0492b1" diff --git a/pyproject.toml b/pyproject.toml index 374b58c..fce5be8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,54 @@ -[build-system] -requires = [ - "setuptools>=42", - "wheel" +[tool.poetry] +name = "gitlab-watchman" +version = "3.1.0" +description = "Finding exposed secrets and personal data in GitLab" +authors = ["PaperMtn "] +license = "GPL-3.0" +readme = "README.md" +homepage = "https://github.com/PaperMtn/gitlab-watchman" +repository = 
"https://github.com/PaperMtn/gitlab-watchman" +keywords = [ "audit", "dlp", "gitlab", "gitlab-watchman", "watchman", "blue-team", "red-team", "threat-hunting" ] +classifiers = [ + "Intended Audience :: Information Technology", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", +] + +[tool.poetry.urls] +"Blog" = "https://papermtn.co.uk/category/tools/gitlab-watchman/" + +[tool.poetry.dependencies] +python = ">=3.10" +colorama = "^0.4.6" +pyyaml = "^6.0.2" +requests = "^2.32.3" +python-gitlab = "^5.0.0" +pytz = "^2024.2" + +[tool.poetry.group.dev.dependencies] +pytest = "^8.3.3" +coverage = "^7.2.3" +pylint = "^3.3.1" + +[tool.poetry.scripts] +gitlab-watchman = "gitlab_watchman:main" + +[tool.pylint.messages_control] +max-line-length = 120 +max-attributes = 10 +max-args = 10 +disable = [ + "missing-module-docstring", + "too-few-public-methods", + "arguments-differ", + "logging-fstring-interpolation", + "no-else-return", + "no-else-raise", + "inconsistent-return-statements", + "broad-exception-caught", + "duplicate-code", ] -build-backend = "setuptools.build_meta" + + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 06bbb4e..0000000 --- a/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -requests -PyYAML -colorama diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index e47cf91..0000000 --- a/setup.cfg +++ /dev/null @@ -1,35 +0,0 @@ -[metadata] -name = gitlab-watchman -version = attr: gitlab_watchman.__version__.__version__ -description = Finding exposed secrets and personal data in GitLab -long_description = file: README.md, CHANGELOG.md -long_description_content_type = text/markdown -license = GPL-3.0 -url = https://github.com/PaperMtn/gitlab-watchman -author = PaperMtn -author_email = papermtn@protonmail.com -keywords = audit, gitlab, gitlab-watchman, watchman, blue-team, red-team, threat-hunting 
-classifiers = - Intended Audience :: Information Technology - Topic :: Security - License :: OSI Approved :: GNU General Public License v3 (GPLv3) - Programming Language :: Python :: 3.10 - -[options] -zip_safe = False -package_dir = - = src -include_package_data = True -packages = find: -python_requires = >=3.10 -install_requires = - requests - PyYAML - colorama - -[options.entry_points] -console_scripts = - gitlab-watchman = gitlab_watchman:main - -[options.packages.find] -where = src diff --git a/src/gitlab_watchman/__init__.py b/src/gitlab_watchman/__init__.py index b761691..3f0e63e 100644 --- a/src/gitlab_watchman/__init__.py +++ b/src/gitlab_watchman/__init__.py @@ -1,133 +1,154 @@ import argparse import calendar +import datetime import multiprocessing import os import sys import time -import datetime import traceback -from pathlib import Path -from typing import List - -from . import gitlab_wrapper -from . import __version__ -from . import gw_logger -from . import signature_updater -from . 
import exceptions -from .models import ( +from dataclasses import dataclass +from importlib import metadata +from typing import List, Dict, Any + +import yaml + +from gitlab_watchman import watchman_processor +from gitlab_watchman.clients.gitlab_client import GitLabAPIClient +from gitlab_watchman.signature_downloader import SignatureDownloader +from gitlab_watchman.exceptions import ( + GitLabWatchmanError, + GitLabWatchmanGetObjectError, + GitLabWatchmanNotAuthorisedError, + GitLabWatchmanAuthenticationError, + ElasticsearchMissingError, + MissingEnvVarError, + MisconfiguredConfFileError +) +from gitlab_watchman.loggers import ( + JSONLogger, + StdoutLogger, + log_to_csv, + init_logger +) +from gitlab_watchman.models import ( signature, user, project, group ) -SIGNATURES_PATH = (Path(__file__).parents[2] / 'watchman-signatures').resolve() -OUTPUT_LOGGER = gw_logger.JSONLogger +@dataclass +class SearchArgs: + """ Dataclass to hold search arguments """ + gitlab_client: GitLabAPIClient + sig_list: List[signature.Signature] + timeframe: int + logging_type: str + log_handler: JSONLogger | StdoutLogger + debug: bool + verbose: bool + scopes: List[str] -def search(gitlab_connection: gitlab_wrapper.GitLabAPIClient, - sig: signature.Signature, - timeframe: int, - scope: str, - verbose: bool): + +def search(search_args: SearchArgs, sig: signature.Signature, scope: str): """ Use the appropriate search function to search GitLab based on the contents of the signature file. 
Output results to stdout Args: - gitlab_connection: GitLab API object + search_args: SearchArgs object sig: Signature object - timeframe: Timeframe to search for scope: What sort of GitLab objects to search - verbose: Whether to use verbose logging or not """ try: OUTPUT_LOGGER.log('INFO', f'Searching for {sig.name} in {scope}') - results = gitlab_wrapper.search( - gitlab=gitlab_connection, - log_handler=OUTPUT_LOGGER, + results = watchman_processor.search( + gitlab=search_args.gitlab_client, + logging_type=search_args.logging_type, + log_handler=search_args.log_handler, + debug=search_args.debug, sig=sig, scope=scope, - verbose=verbose, - timeframe=timeframe) + verbose=search_args.verbose, + timeframe=search_args.timeframe) if results: for log_data in results: - OUTPUT_LOGGER.log('NOTIFY', - log_data, - scope=scope, - severity=sig.severity, - detect_type=sig.name, - notify_type='result') - except exceptions.ElasticsearchMissingError as e: + OUTPUT_LOGGER.log( + 'NOTIFY', + log_data, + scope=scope, + severity=sig.severity, + detect_type=sig.name, + notify_type='result') + except ElasticsearchMissingError as e: OUTPUT_LOGGER.log('WARNING', e) OUTPUT_LOGGER.log('DEBUG', traceback.format_exc()) except Exception as e: raise e -def init_logger(logging_type: str, debug: bool) -> gw_logger.JSONLogger or gw_logger.StdoutLogger: - """ Create a logger object. 
Defaults to stdout if no option is given +def perform_search(search_args: SearchArgs): + """ Helper function to perform the search for each signature and each scope Args: - logging_type: Type of logging to use - debug: Whether to use debug level logging or not - Returns: - Logger object + search_args: SearchArgs object """ - if not logging_type or logging_type == 'stdout': - return gw_logger.StdoutLogger(debug=debug) - else: - return gw_logger.JSONLogger(debug=debug) - + for sig in search_args.sig_list: + if sig.scope: + for scope in search_args.scopes: + if scope in sig.scope: + search(search_args, sig, scope) -def load_signatures() -> List[signature.Signature]: - """ Load signatures from YAML files - Returns: - List containing loaded definitions as Signatures objects - """ - loaded_signatures = [] - try: - for root, dirs, files in os.walk(SIGNATURES_PATH): - for sig_file in files: - sig_path = (Path(root) / sig_file).resolve() - if sig_path.name.endswith('.yaml'): - loaded_def = signature.load_from_yaml(sig_path) - for sig in loaded_def: - if sig.status == 'enabled' and 'gitlab' in sig.watchman_apps: - loaded_signatures.append(sig) - return loaded_signatures - except Exception as e: - raise e - - -def validate_variables() -> bool: +def validate_variables() -> Dict[str, Any]: """ Validate whether GitLab Watchman environment variables have been set Returns: True if both variables are set """ - if os.environ.get('GITLAB_WATCHMAN_TOKEN') and os.environ.get('GITLAB_WATCHMAN_URL'): - return True - else: - try: - os.environ['GITLAB_WATCHMAN_TOKEN'] - except: - raise exceptions.MissingEnvVarError('GITLAB_WATCHMAN_TOKEN') + required_vars = ['GITLAB_WATCHMAN_TOKEN', 'GITLAB_WATCHMAN_URL'] + + for var in required_vars: + if var not in os.environ: + raise MissingEnvVarError(var) + path = f'{os.path.expanduser("~")}/watchman.conf' + if os.path.exists(path): try: - os.environ['GITLAB_WATCHMAN_URL'] - except: - raise exceptions.MissingEnvVarError('GITLAB_WATCHMAN_URL') + with 
open(path) as yaml_file: + conf_details = yaml.safe_load(yaml_file)['gitlab_watchman'] + return { + 'disabled_signatures': conf_details.get('disabled_signatures', []) + } + except Exception as e: + raise MisconfiguredConfFileError from e + return {} + + +def supress_disabled_signatures(signatures: List[signature.Signature], + disabled_signatures: List[str]) -> List[signature.Signature]: + """ Suppress signatures that are disabled in the config file + Args: + signatures: List of signatures to filter + disabled_signatures: List of signatures to disable + Returns: + List of signatures with disabled signatures removed + """ + return [sig for sig in signatures if sig.id not in disabled_signatures] + +# pylint: disable=too-many-locals, missing-function-docstring, global-variable-undefined +# pylint: disable=too-many-branches, disable=too-many-statements def main(): global OUTPUT_LOGGER try: start_time = time.time() - parser = argparse.ArgumentParser(description=__version__.__summary__) + project_metadata = metadata.metadata('gitlab-watchman') + parser = argparse.ArgumentParser(description='Finding exposed secrets and personal data in GitLab') required = parser.add_argument_group('required arguments') required.add_argument('--timeframe', choices=['d', 'w', 'm', 'a'], dest='time', help='How far back to search: d = 24 hours w = 7 days, m = 30 days, a = all time', @@ -135,7 +156,7 @@ def main(): parser.add_argument('--output', '-o', choices=['json', 'stdout'], dest='logging_type', help='Where to send results') parser.add_argument('--version', '-v', action='version', - version=f'gitlab-watchman {__version__.__version__}') + version=f'GitLab Watchman: {project_metadata.get("version")}') parser.add_argument('--all', '-a', dest='everything', action='store_true', help='Find everything') parser.add_argument('--blobs', '-b', dest='blobs', action='store_true', @@ -163,7 +184,6 @@ def main(): 'This includes more fields, but is larger') args = parser.parse_args() - tm = args.time 
everything = args.everything blobs = args.blobs commits = args.commits @@ -178,32 +198,29 @@ def main(): debug = args.debug enum = args.enum - if tm == 'd': - tf = 86400 - elif tm == 'w': - tf = 604800 - elif tm == 'm': - tf = 2592000 - else: - tf = calendar.timegm(time.gmtime()) + 1576800000 + tf_options = { + 'd': 86400, + 'w': 604800, + 'm': 2592000, + 'a': calendar.timegm(time.gmtime()) + 1576800000 + } + timeframe = tf_options.get(args.time) OUTPUT_LOGGER = init_logger(logging_type, debug) - if validate_variables(): - connection = gitlab_wrapper.initiate_gitlab_connection( - os.environ.get('GITLAB_WATCHMAN_TOKEN'), - os.environ.get('GITLAB_WATCHMAN_URL'), - OUTPUT_LOGGER) - else: - raise Exception('Either GITLAB_WATCHMAN_TOKEN or GITLAB_WATCHMAN_URL environment variables not set') + config = validate_variables() + disabled_signatures = config.get('disabled_signatures', []) + gitlab_client = watchman_processor.initiate_gitlab_connection( + os.environ.get('GITLAB_WATCHMAN_TOKEN'), + os.environ.get('GITLAB_WATCHMAN_URL')) now = int(time.time()) today = datetime.date.today().strftime('%Y-%m-%d') - start_date = time.strftime('%Y-%m-%d', time.localtime(now - tf)) + start_date = time.strftime('%Y-%m-%d', time.localtime(now - timeframe)) OUTPUT_LOGGER.log('SUCCESS', 'GitLab Watchman started execution') - OUTPUT_LOGGER.log('INFO', f'Version: {__version__.__version__}') - OUTPUT_LOGGER.log('INFO', f'Created by: {__version__.__author__} - {__version__.__email__}') + OUTPUT_LOGGER.log('INFO', f'Version: {project_metadata.get("version")}') + OUTPUT_LOGGER.log('INFO', 'Created by: PaperMtn ') OUTPUT_LOGGER.log('INFO', f'Searching GitLab instance {os.environ.get("GITLAB_WATCHMAN_URL")}') OUTPUT_LOGGER.log('INFO', f'Searching from {start_date} to {today}') if verbose: @@ -211,126 +228,126 @@ def main(): else: OUTPUT_LOGGER.log('INFO', 'Using non-verbose logging') - OUTPUT_LOGGER.log('INFO', 'Downloading signature file updates') - 
signature_updater.SignatureUpdater(OUTPUT_LOGGER).update_signatures() - OUTPUT_LOGGER.log('INFO', 'Importing signatures...') - signature_list = load_signatures() + OUTPUT_LOGGER.log('INFO', 'Downloading and importing signatures') + signature_list = SignatureDownloader(OUTPUT_LOGGER).download_signatures() + if len(disabled_signatures) > 0: + signature_list = supress_disabled_signatures(signature_list, disabled_signatures) + OUTPUT_LOGGER.log('INFO', f'The following signatures have been suppressed: {disabled_signatures}') OUTPUT_LOGGER.log('SUCCESS', f'{len(signature_list)} signatures loaded') OUTPUT_LOGGER.log('INFO', f'{multiprocessing.cpu_count() - 1} cores being used') - instance_metadata = connection.get_metadata() + instance_metadata = gitlab_client.get_metadata() OUTPUT_LOGGER.log('INSTANCE', instance_metadata, detect_type='Instance', notify_type='instance') - authenticated_user = connection.get_user_info() + authenticated_user = gitlab_client.get_user_info() OUTPUT_LOGGER.log('USER', authenticated_user, detect_type='User', notify_type='user') if authenticated_user.get('is_admin'): OUTPUT_LOGGER.log('SUCCESS', 'This user is an administrator on this GitLab instance!') - token_info = connection.get_authed_access_token_value() + token_info = gitlab_client.get_authed_access_token_value() OUTPUT_LOGGER.log('TOKEN', token_info, detect_type='Token', notify_type='token') if enum: OUTPUT_LOGGER.log('SUCCESS', 'Carrying out enumeration') OUTPUT_LOGGER.log('INFO', 'Enumerating users...') - gitlab_user_output = connection.get_all_users() + gitlab_user_output = gitlab_client.get_all_users() user_objects = [] for u in gitlab_user_output: user_objects.append(user.create_from_dict(u)) OUTPUT_LOGGER.log('SUCCESS', f'{len(gitlab_user_output)} users discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - gw_logger.export_csv('gitlab_users', user_objects) + log_to_csv('gitlab_users', user_objects) OUTPUT_LOGGER.log( 'SUCCESS', f'Users output to CSV file: 
{os.path.join(os.getcwd(), "gitlab_users.csv")}') OUTPUT_LOGGER.log('INFO', 'Enumerating groups...') - gitlab_groups_output = connection.get_all_groups() + gitlab_groups_output = gitlab_client.get_all_groups() group_objects = [] for g in gitlab_groups_output: group_objects.append(group.create_from_dict(g)) OUTPUT_LOGGER.log('SUCCESS', f'{len(group_objects)} groups discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - gw_logger.export_csv('gitlab_groups', group_objects) + log_to_csv('gitlab_groups', group_objects) OUTPUT_LOGGER.log( 'SUCCESS', f'Groups output to CSV file: {os.path.join(os.getcwd(), "gitlab_groups.csv")}') OUTPUT_LOGGER.log('INFO', 'Enumerating projects...') - gitlab_projects_output = connection.get_all_projects() + gitlab_projects_output = gitlab_client.get_all_projects() project_objects = [] for p in gitlab_projects_output: project_objects.append(project.create_from_dict(p)) OUTPUT_LOGGER.log('SUCCESS', f'{len(project_objects)} projects discovered') OUTPUT_LOGGER.log('INFO', 'Writing to csv') - gw_logger.export_csv('gitlab_projects', project_objects) + log_to_csv('gitlab_projects', project_objects) OUTPUT_LOGGER.log( 'SUCCESS', f'Projects output to CSV file: {os.path.join(os.getcwd(), "gitlab_projects.csv")}') + search_args = SearchArgs( + gitlab_client=gitlab_client, + sig_list=signature_list, + timeframe=timeframe, + logging_type=logging_type, + log_handler=OUTPUT_LOGGER, + debug=debug, + verbose=verbose, + scopes=[]) + if everything: OUTPUT_LOGGER.log('INFO', 'Getting everything...') - for sig in signature_list: - if 'blobs' in sig.scope: - search(connection, sig, tf, 'blobs', verbose) - if 'commits' in sig.scope: - search(connection, sig, tf, 'commits', verbose) - if 'issues' in sig.scope: - search(connection, sig, tf, 'issues', verbose) - if 'merge_requests' in sig.scope: - search(connection, sig, tf, 'merge_requests', verbose) - if 'wiki_blobs' in sig.scope: - search(connection, sig, tf, 'wiki_blobs', verbose) - if 'milestones' in 
sig.scope: - search(connection, sig, tf, 'milestones', verbose) - if 'notes' in sig.scope: - search(connection, sig, tf, 'notes', verbose) - if 'snippet_titles' in sig.scope: - search(connection, sig, tf, 'snippet_titles', verbose) + search_args.scopes = [ + 'blobs', + 'commits', + 'issues', + 'merge_requests', + 'wiki_blobs', + 'milestones', + 'notes', + 'snippet_titles' + ] + perform_search(search_args) else: if blobs: OUTPUT_LOGGER.log('INFO', 'Searching blobs') - for sig in signature_list: - if 'blobs' in sig.scope: - search(connection, sig, tf, 'blobs', verbose) + search_args.scopes = ['blobs'] + perform_search(search_args) if commits: - OUTPUT_LOGGER.log('INFO', 'Searching commits', verbose) - for sig in signature_list: - if 'commits' in sig.scope: - search(connection, sig, tf, 'commits', verbose) + OUTPUT_LOGGER.log('INFO', 'Searching commits') + search_args.scopes = ['commits'] + perform_search(search_args) if issues: OUTPUT_LOGGER.log('INFO', 'Searching issues') - for sig in signature_list: - if 'issues' in sig.scope: - search(connection, sig, tf, 'issues', verbose) + search_args.scopes = ['issues'] + perform_search(search_args) if merge: OUTPUT_LOGGER.log('INFO', 'Searching merge requests') - for sig in signature_list: - if 'merge_requests' in sig.scope: - search(connection, sig, tf, 'merge_requests', verbose) + search_args.scopes = ['merge_requests'] + perform_search(search_args) if wiki: OUTPUT_LOGGER.log('INFO', 'Searching wiki blobs') - for sig in signature_list: - if 'wiki_blobs' in sig.scope: - search(connection, sig, tf, 'wiki_blobs', verbose) + search_args.scopes = ['wiki_blobs'] + perform_search(search_args) if milestones: OUTPUT_LOGGER.log('INFO', 'Searching milestones') - for sig in signature_list: - if 'milestones' in sig.scope: - search(connection, sig, tf, 'milestones', verbose) + search_args.scopes = ['milestones'] + perform_search(search_args) if notes: OUTPUT_LOGGER.log('INFO', 'Searching notes') - for sig in signature_list: - if 'notes' 
in sig.scope: - search(connection, sig, tf, 'notes', verbose) + search_args.scopes = ['notes'] + perform_search(search_args) if snippets: OUTPUT_LOGGER.log('INFO', 'Searching snippets') - for sig in signature_list: - if 'snippet_titles' in sig.scope: - search(connection, sig, tf, 'snippet_titles', verbose) + search_args.scopes = ['snippet_titles'] + perform_search(search_args) OUTPUT_LOGGER.log('SUCCESS', f'GitLab Watchman finished execution - Execution time:' f' {str(datetime.timedelta(seconds=time.time() - start_time))}') - except exceptions.ElasticsearchMissingError as e: + except (ElasticsearchMissingError, + GitLabWatchmanNotAuthorisedError, + GitLabWatchmanGetObjectError, + GitLabWatchmanAuthenticationError) as e: OUTPUT_LOGGER.log('WARNING', e) OUTPUT_LOGGER.log('DEBUG', traceback.format_exc()) except Exception as e: diff --git a/src/gitlab_watchman/__main__.py b/src/gitlab_watchman/__main__.py index 427963d..8273c4f 100644 --- a/src/gitlab_watchman/__main__.py +++ b/src/gitlab_watchman/__main__.py @@ -1,3 +1,3 @@ from . 
import main -main() \ No newline at end of file +main() diff --git a/src/gitlab_watchman/__version__.py b/src/gitlab_watchman/__version__.py deleted file mode 100644 index 9272314..0000000 --- a/src/gitlab_watchman/__version__.py +++ /dev/null @@ -1,18 +0,0 @@ -__all__ = [ - '__title__', - '__summary__', - '__uri__', - '__version__', - '__author__', - '__email__', - '__license__', -] - -__title__ = 'GitLab Watchman' -__summary__ = 'Finding exposed secrets and personal data in GitLab' -__uri__ = 'https://github.com/PaperMtn/gitlab-watchman' -__version__ = '3.0.0' -__author__ = 'PaperMtn' -__email__ = 'papermtn@protonmail.com' -__license__ = 'GPL-3.0' -__copyright__ = f'2023 {__author__}' diff --git a/src/gitlab_watchman/clients/__init__.py b/src/gitlab_watchman/clients/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/gitlab_watchman/clients/gitlab_client.py b/src/gitlab_watchman/clients/gitlab_client.py new file mode 100644 index 0000000..f71474b --- /dev/null +++ b/src/gitlab_watchman/clients/gitlab_client.py @@ -0,0 +1,352 @@ +import calendar +import time +from typing import List, Dict, Any + +import requests +from gitlab import Gitlab +from gitlab.const import SearchScope +from gitlab.v4.objects import User +from gitlab.exceptions import ( + GitlabLicenseError, + GitlabAuthenticationError, + GitlabGetError, + GitlabListError, + GitlabSearchError, + GitlabHttpError +) +from gitlab_watchman.exceptions import ( + GitLabWatchmanAuthenticationError, + GitLabWatchmanGetObjectError, + GitLabWatchmanNotAuthorisedError +) + +ALL_TIME = calendar.timegm(time.gmtime()) + 1576800000 + + +def exception_handler(func): + """ Decorator to handle exceptions raised by the GitLab API + """ + + def inner_function(*args, **kwargs): + try: + return func(*args, **kwargs) + except GitlabAuthenticationError as e: + raise GitLabWatchmanAuthenticationError(e.error_message) from e + except (GitlabGetError, + GitlabListError, + GitlabLicenseError, + GitlabSearchError, 
+ GitlabHttpError) as e: + if e.response_code == 403: + raise GitLabWatchmanNotAuthorisedError(e.error_message, func) from e + elif e.response_code == 500: + pass + else: + raise GitLabWatchmanGetObjectError(e.error_message, func, args) from e + except IndexError: + pass + except Exception as e: + raise e + + return inner_function + + +class GitLabAPIClient: + """ Class to interact with the GitLab API + + Attributes: + base_url: Base URL for the GitLab instance + session: Session object to make requests + gitlab_client: GitLab client object to interact with the API + """ + + @exception_handler + def __init__(self, + token: str, + base_url: str): + self.base_url = base_url.rstrip('\\') + self.session = session = requests.session() + session.headers.update({'Authorization': f'Bearer {token}'}) + self.gitlab_client = Gitlab( + url=self.base_url, + private_token=token, + session=self.session, + per_page=100, + retry_transient_errors=True, + api_version='4') + self.gitlab_client.auth() + + @exception_handler + def get_user_info(self) -> Dict[str, Any]: + """ Get information on the authenticated user + + Returns: + User object with user information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitlabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.user.asdict() + + @exception_handler + def get_all_users(self) -> List[User]: + """ Get all users in the GitLab instance + + Returns: + User object with user information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitlabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.users.list(get_all=True, active=True, without_project_bots=True) + + @exception_handler + def get_user_by_username(self, username: str) -> Dict[str, Any] | None: + """ Get a GitLab user by their username + + Args: + username: Username of the user + 
Returns: + User object containing user data + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitlabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.users.list(username=username, active=False, blocked=True)[0].asdict() + + @exception_handler + def get_settings(self) -> Dict[str, Any]: + """ Get the settings for the GitLab instance + + Returns: + JSON object with settings + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitlabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.settings.get().asdict() + + @exception_handler + def get_licence(self) -> Dict[str, Any]: + """ Get the licence for the GitLab instance + + Returns: + JSON object with metadata + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitlabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.get_license() + + @exception_handler + def get_metadata(self) -> Dict[str, Any]: + """ Get GitLab project metadata + + Returns: + JSON object with GitLab instance information + """ + return self.session.get(f'{self.base_url}/api/v4/metadata').json() + + @exception_handler + def get_instance_level_variables(self) -> List[Any]: + """ Get any instance-level CICD variables + + Returns: + JSON object with variable information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitlabWatchmanGetObjectError: If an error occurs while getting the object + """ + + return self.gitlab_client.variables.list(as_list=True) + + @exception_handler + def get_authed_access_token_value(self) -> Dict: + """ Get the value of a personal access token + + Returns: + JSON object with token information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the 
resource + """ + return self.session.get(f'{self.base_url}/api/v4/personal_access_tokens/self').json() + + @exception_handler + def get_project(self, project_id: str) -> Dict[str, Any]: + """ Get a GitLab project by its ID + + Args: + project_id: ID of the project to return + Returns: + JSON object with project information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.projects.get(project_id).asdict() + + @exception_handler + def get_all_projects(self) -> List[Dict]: + """ Get all GitLab projects. + + Returns: + List of all projects + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + projects = self.gitlab_client.projects.list(all=True, as_list=True) + return [project.asdict() for project in projects] + + @exception_handler + def get_project_members(self, project_id: str) -> List[Dict[str, Any]]: + """ Get members of a project + + Args: + project_id: ID of the project to retrieve + Returns: + RESTObject object containing project members + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + members = self.gitlab_client.projects.get(project_id).members.list(as_list=True, get_all=True) + return [member.asdict() for member in members] + + @exception_handler + def get_file(self, + project_id: str, + path: str, + ref: str) -> Dict[str, Any]: + """ Get a file stored in a project + + Args: + project_id: ID of the project to retrieve + path: URL encoded full path to file + ref: The name of branch, tag or commit + Returns: + JSON object with the file information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource 
+ GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.projects.get(project_id).files.get( + file_path=path, ref=ref).asdict() + + @exception_handler + def get_group(self, group_id: str) -> Dict[str, Any]: + """ Get a GitLab group by its ID + + Args: + group_id: ID of the group to return + Returns: + Dict with group information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.groups.get(group_id).asdict() + + @exception_handler + def get_all_groups(self) -> List[Dict]: + """ Get all groups visible to the authenticated user + + Returns: + Dict with group information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + groups = self.gitlab_client.groups.list(as_list=True) + return [group.asdict() for group in groups] + + @exception_handler + def get_group_members(self, group_id: str) -> List[Dict]: + """ Get members of a GitLab group + + Args: + group_id: ID of the group to get members for + Returns: + Dict object with group member information + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + members = self.gitlab_client.groups.get(group_id).members.list(as_list=True, get_all=True) + return [member.asdict() for member in members] + + @exception_handler + def get_commit(self, + project_id: str, + commit_id: str) -> Dict[str, Any]: + """ Get commit information + + Args: + project_id: ID for the project the commit exists in + commit_id: ID of the commit + Returns: + Dict object containing commit data + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + 
GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.projects.get(project_id).commits.get(commit_id).asdict() + + @exception_handler + def get_wiki_page(self, + project_id: str, + slug: str) -> Dict[str, Any]: + """ Get a wiki page from a project + + Args: + project_id: ID of the project the wiki page is in + slug: URL slug for the wiki page + Returns: + JSON object containing wiki data + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + return self.gitlab_client.projects.get(project_id).wikis.get(slug).asdict() + + @exception_handler + def global_search(self, + search_term: str = '', + search_scope: str = '') -> List[Dict[str, Any]]: + """ Search using the GitLab advanced search API. Uses search term and scope to + decide what to search for. + + Args: + search_term: Search string to use + search_scope: Scope of what to look for. 
One of: + - blobs + - commits + - issues + - merge_requests + - wiki_blobs + - milestones + - notes + - snippet_titles + Returns: + List containing Dict objects with matches for the search string + Raises: + GitLabWatchmanNotAuthorisedError: If the user is not authorized to access the resource + GitLabWatchmanGetObjectError: If an error occurs while getting the object + """ + scope_map = { + 'blobs': SearchScope.BLOBS, + 'commits': SearchScope.COMMITS, + 'issues': SearchScope.ISSUES, + 'merge_requests': SearchScope.MERGE_REQUESTS, + 'wiki_blobs': SearchScope.WIKI_BLOBS, + 'milestones': SearchScope.MILESTONES, + 'notes': SearchScope.PROJECT_NOTES, + 'snippet_titles': SearchScope.GLOBAL_SNIPPET_TITLES, + } + + return self.gitlab_client.search( + search=search_term, + scope=scope_map.get(search_scope, SearchScope.BLOBS), + all=True, + as_list=True, + per_page=100) diff --git a/src/gitlab_watchman/exceptions.py b/src/gitlab_watchman/exceptions.py index 4c2046b..b45fd8c 100644 --- a/src/gitlab_watchman/exceptions.py +++ b/src/gitlab_watchman/exceptions.py @@ -1,4 +1,9 @@ -class ElasticsearchMissingError(Exception): +class GitLabWatchmanError(Exception): + """ Base class for exceptions in GitLab Watchman. + """ + + +class ElasticsearchMissingError(GitLabWatchmanError): """ Exception raised when Elasticsearch is not enabled on the instance. """ @@ -8,7 +13,7 @@ def __init__(self, scope): super().__init__(self.message) -class MissingEnvVarError(Exception): +class MissingEnvVarError(GitLabWatchmanError): """ Exception raised when an environment variable is missing. """ @@ -16,3 +21,40 @@ def __init__(self, env_var): self.env_var = env_var self.message = f'Missing Environment Variable: {self.env_var}' super().__init__(self.message) + + +class GitLabWatchmanAuthenticationError(GitLabWatchmanError): + """ Exception raised when unable to authenticate to GitLab. 
+    """ + + def __init__(self, error_message: str): + super().__init__('Unable to authenticate to GitLab: ' + error_message) + self.error_message = error_message + + +class GitLabWatchmanGetObjectError(GitLabWatchmanError): + """ Exception raised when an error occurs while getting a GitLab API object. + """ + + def __init__(self, error_message: str, func, arg): + super().__init__(f'GitLab get object error: {error_message} - Function: {func.__name__} - Arg: {arg}') + self.error_message = error_message + + +class GitLabWatchmanNotAuthorisedError(GitLabWatchmanError): + """ Exception raised when the authenticated user is not authorized to access the + resource on the GitLab API. + """ + + def __init__(self, error_message: str, func): + super().__init__(f'Not authorised: {error_message} - {func.__name__}') + self.error_message = error_message + + +class MisconfiguredConfFileError(Exception): + """ Exception raised when the config file watchman.conf does not contain GitLab Watchman configuration. + """ + + def __init__(self): + self.message = f"The file watchman.conf doesn't contain config details for GitLab Watchman" + super().__init__(self.message) diff --git a/src/gitlab_watchman/gitlab_wrapper.py b/src/gitlab_watchman/gitlab_wrapper.py deleted file mode 100644 index 88302d2..0000000 --- a/src/gitlab_watchman/gitlab_wrapper.py +++ /dev/null @@ -1,854 +0,0 @@ -import calendar -import dataclasses -import json -import re -import time -import requests -import multiprocessing -from requests.exceptions import HTTPError -from urllib3.util import Retry -from requests.adapters import HTTPAdapter -from urllib.parse import quote -from typing import List, Dict - -from . import gw_logger -from . 
import exceptions -from .models import ( - signature, - note, - snippet, - blob, - wiki_blob, - file, - commit, - user, - merge_request, - milestone, - issue, - project -) - -ALL_TIME = calendar.timegm(time.gmtime()) + 1576800000 - - -class GitLabAPIClient(object): - - def __init__(self, - token: str, - base_url: str, - logger: gw_logger.StdoutLogger or gw_logger.JSONLogger): - self.token = token - self.base_url = base_url.rstrip('\\') - self.per_page = 100 - self.session = session = requests.session() - session.mount(self.base_url, - HTTPAdapter( - max_retries=Retry( - total=5, - backoff_factor=0.3, - status_forcelist=[500, 502, 503, 504]))) - session.headers.update({'Authorization': f'Bearer {self.token}'}) - - if isinstance(logger, gw_logger.JSONLogger): - self.logger = None - else: - self.logger = logger - - def _make_request(self, - url: str, - params=None, - data=None, - method='GET', - verify_ssl=True): - try: - relative_url = '/'.join((self.base_url, 'api/v4', url)) - response = self.session.request(method, relative_url, params=params, data=data, verify=verify_ssl) - response.raise_for_status() - - return response - - except HTTPError as http_error: - if response.status_code == 400: - if response.json().get('message').get('error') == 'Scope not supported without Elasticsearch!': - raise exceptions.ElasticsearchMissingError(params.get('scope')) - else: - raise http_error - elif response.status_code == 429: - if self.logger: - self.logger.log( - mes_type='WARNING', - message='Rate limit hit, cooling off for 90 seconds...') - else: - print('Rate limit hit, cooling off for 90 seconds...') - - time.sleep(90) - response = self.session.request(method, relative_url, params=params, data=data, verify=verify_ssl) - response.raise_for_status() - - return response - else: - raise - except: - raise - - def _get_pages(self, url, params): - first_page = self._make_request(url, params) - yield first_page.json() - num_pages = int(first_page.headers.get('X-Total-Pages')) - - 
for page in range(2, num_pages + 1): - params['page'] = str(page) - next_page = self._make_request(url, params=params).json() - yield next_page - - def page_api_search(self, - url: str, - search_scope: str = None, - search_term: str = None) -> List[Dict]: - """ Wrapper for GitLab API methods that use page number based pagination - Args: - search_scope: - search_term: - url: API endpoint to use - Returns: - A list of dict objects with responses - """ - - results = [] - params = { - 'per_page': self.per_page, - 'page': '', - 'scope': search_scope, - 'search': search_term, - } - - for page in self._get_pages(url, params): - results.append(page) - - return [item for sublist in results for item in sublist] - - def get_user_by_id(self, user_id: str) -> json: - """ Get a GitLab user by their ID - - Args: - user_id: ID of the user - Returns: - JSON object containing user data - """ - return self._make_request(f'users/{user_id}').json() - - def get_user_by_username(self, username: str) -> json: - """ Get a GitLab user by their username - - Args: - username: Username of the user - Returns: - JSON object containing user data - """ - return self._make_request(f'users?username={username}').json() - - def get_token_user(self) -> json: - """ Get the details of the user who's token is being used - - Returns: - JSON object containing user data - """ - return self._make_request('user').json() - - def get_licence_info(self) -> json: - """ Get information on the GitLab licence - - Returns: - JSON object containing licence information - """ - return self._make_request('license').json() - - def get_metadata(self) -> Dict: - """ Get GitLab project metadata - - Returns: - JSON object with GitLab instance information - """ - return self._make_request(f'metadata').json() - - def get_user_info(self) -> Dict: - """ Get information on the authenticated user - - Returns: - JSON object with user information - """ - return self._make_request(f'user').json() - - def 
get_instance_level_variables(self) -> Dict: - """ Get any instance-level CICD variables - - Returns: - JSON object with variable information - """ - return self._make_request(f'admin/ci/variables').json() - - def get_personal_access_tokens(self) -> Dict: - """ Get personal access tokens available to this user - - Returns: - JSON object with token information - """ - return self._make_request(f'personal_access_tokens').json() - - def get_personal_access_token_value(self, token_id: str) -> Dict: - """ Get the value of a personal access token - - Returns: - JSON object with token information - """ - return self._make_request(f'personal_access_tokens/{token_id}').json() - - def get_authed_access_token_value(self) -> Dict: - """ Get the value of a personal access token - - Returns: - JSON object with token information - """ - return self._make_request(f'personal_access_tokens/self').json() - - def get_all_users(self) -> List[Dict]: - """ Get all users in the GitLab instance - - Returns: - JSON object with user information - """ - return self.page_api_search('users?active=true&without_project_bots=true') - - def get_project(self, project_id: str) -> json: - """ Get a GitLab project by its ID - - Args: - project_id: ID of the project to return - Returns: - JSON object with project information - """ - return self._make_request(f'projects/{project_id}').json() - - def get_variables(self, project_id: str) -> json: - """ Get publicly available CICD variables for a project - - Args: - project_id: ID of the project to search - Returns: - JSON object containing variable information - """ - return self._make_request(f'projects/{project_id}/variables').json() - - def get_project_members(self, project_id: str) -> json: - """ Get members of a project - - Args: - project_id: ID of the project to retrieve - Returns: - JSON object containing project members - """ - return self._make_request(f'projects/{project_id}/members').json() - - def get_file(self, - project_id: str, - path: str, 
- ref: str) -> json: - """ Get a file stored in a project - - Args: - project_id: ID of the project to retrieve - path: URL encoded full path to file - ref: The name of branch, tag or commit - Returns: - JSON object with file information - """ - path = ''.join((quote(path, safe=''), '?ref=', ref)) - return self._make_request(f'projects/{project_id}/repository/files/{path}').json() - - def get_group_members(self, group_id: str) -> json: - """ Get members of a GitLab group - - Args: - group_id: ID of the group to get members for - Returns: - JSON object with group member information - """ - return self._make_request(f'groups/{group_id}/members').json() - - def get_commit(self, - project_id: str, - commit_id: str) -> json: - """ Get commit information - - Args: - project_id: ID for the project the commit exists in - commit_id: ID of the commit - Returns: - JSON object containing commit data - """ - return self._make_request(f'projects/{project_id}/repository/commits/{commit_id}').json() - - def get_wiki_page(self, - project_id: str, - slug: str) -> json: - """ - - Args: - project_id: ID of the project the wiki page is in - slug: URL slug for the wiki page - Returns: - JSON object containing wiki data - - """ - return self._make_request(f'projects/{project_id}/wikis/{slug}').json() - - def global_search(self, - search_term: str = '', - search_scope: str = '') -> List[Dict]: - """ Wrapper for the GitLab advanced search API. Uses search term and scope to - decide what to search for. - - Args: - search_term: Search string to use - search_scope: Scope of what to look for (blobs, commits etc.) - Returns: - List containing JSON objects with matches for the search string - """ - return self.page_api_search('search', search_scope=search_scope, search_term=search_term) - - def get_all_projects(self) -> List[Dict]: - """ Get all public projects. 
Uses keyset pagination, which currently - is only available for the Projects resource in the GitLab API - - Returns: - List of all projects - """ - - results = [] - - params = { - 'pagination': 'keyset', - 'per_page': self.per_page, - 'order_by': 'id', - 'sort': 'asc' - } - - response = self._make_request('projects', params=params) - while 'link' in response.headers: - next_url = response.headers.get('link') - params = { - 'pagination': 'keyset', - 'per_page': self.per_page, - 'order_by': 'id', - 'sort': 'asc', - 'id_after': next_url.split('id_after=')[1].split('&')[0] - } - response = self._make_request('projects', params=params) - for value in response.json(): - results.append(value) - - return results - - def get_all_groups(self) -> List[Dict]: - """ Get all groups in the GitLab instance - - Returns: - JSON object with group information - """ - return self.page_api_search('groups?all_available=true') - - -def initiate_gitlab_connection(token: str, - url: str, - logger: gw_logger.StdoutLogger or gw_logger.JSONLogger) -> GitLabAPIClient: - """ Create a GitLab API client object - - Returns: - GitLab API client object - """ - - try: - return GitLabAPIClient(token, url, logger) - except Exception as e: - raise e - - -def _convert_time(timestamp: str) -> int: - """Convert ISO 8601 timestamp to epoch """ - - pattern = '%Y-%m-%dT%H:%M:%S.%f%z' - return int(time.mktime(time.strptime(timestamp, pattern))) - - -def _deduplicate(input_list: List[Dict]) -> List[Dict]: - """ Removes duplicates where results are returned by multiple queries - Nested class handles JSON encoding for dataclass objects - - Args: - input_list: List of dataclass objects - Returns: - List of JSON objects with duplicates removed - """ - - class EnhancedJSONEncoder(json.JSONEncoder): - def default(self, o): - if dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - return super().default(o) - - json_set = {json.dumps(dictionary, sort_keys=True, cls=EnhancedJSONEncoder) for dictionary in 
input_list} - - return [json.loads(t) for t in json_set] - - -def _split_to_chunks(input_list, no_of_chunks): - """Split the input list into n amount of chunks""" - - return (input_list[i::no_of_chunks] for i in range(no_of_chunks)) - - -def find_group_owners(group_members: List[Dict]) -> List[Dict]: - """ Return all users who are both active and group Owners - - Args: - group_members: Members of a GitLab group - Returns: - List of owners of a group - """ - - member_list = [] - for user in group_members: - if user.get('state') == 'active' and user.get('access_level') == 50: - member_list.append({ - 'user_id': user.get('id'), - 'name': user.get('name'), - 'username': user.get('username'), - 'access_level': 'Owner' - }) - - return member_list - - -def find_user_owner(user_list: List[Dict]) -> List[Dict]: - """ Return user who owns a namespace - - Args: - user_list: List of users - Returns: - List of formatted users owning a namespace - """ - - owner_list = [] - for user in user_list: - owner_list.append({ - 'user_id': user.get('id'), - 'name': user.get('name'), - 'username': user.get('username'), - 'state': user.get('state') - }) - - return owner_list - - -def search(gitlab: GitLabAPIClient, - log_handler: gw_logger.StdoutLogger or gw_logger.JSONLogger, - sig: signature.Signature, - scope: str, - verbose: bool, - timeframe: int = ALL_TIME) -> List[Dict]: - """ Uses the Search API to get search results for the given scope. 
These results are then split into (No of cores - - 1) number of chunks, and Multiprocessing is then used to concurrently filter them against Regex using the relevant - worker function - - Args: - gitlab: GitLab API object - log_handler: Logger object for outputting results - sig: Signature object - scope: What sort of GitLab objects to search - verbose: Whether to use verbose logging or not - timeframe: Timeframe to search in - Returns: - List of JSON formatted results if any are found - """ - - results = [] - - for query in sig.search_strings: - for pattern in sig.patterns: - regex = re.compile(pattern) - search_result_list = gitlab.global_search(query, search_scope=scope) - query_formatted = query.replace('"', '') - log_handler.log('INFO', - f'{len(search_result_list)} {scope} found matching search term: {query_formatted}') - result = multiprocessing.Manager().list() - - chunks = multiprocessing.cpu_count() - 1 - list_of_chunks = _split_to_chunks(search_result_list, chunks) - - processes = [] - - if scope == 'blobs': - target = _blob_worker - elif scope == 'wiki_blobs': - target = _wiki_blob_worker - elif scope == 'commits': - target = _commit_worker - elif scope == 'issues': - target = _issue_worker - elif scope == 'milestones': - target = _milestone_worker - elif scope == 'notes': - target = _note_worker - elif scope == 'snippet_titles': - target = _snippet_worker - else: - target = _merge_request_worker - - for search_list in list_of_chunks: - p = multiprocessing.Process(target=target, - args=( - gitlab, - search_list, - regex, - timeframe, - result, - verbose - )) - processes.append(p) - p.start() - - for process in processes: - process.join() - - results.append(list(result)) - - if results: - results = _deduplicate([item for sublist in results for item in sublist]) - log_handler.log('INFO', f'{len(results)} total matches found after filtering') - return results - else: - log_handler.log('INFO', 'No matches found after filtering') - - -def 
_populate_project_owners(gitlab: GitLabAPIClient, - project_object: project.Project) -> project.Project: - """ Populates a given project with either the user who owns it if the namespace kind == user, - or members of the group who are owners if the namespace kind == group - - Args: - gitlab: GitLab API object - project_object: Project to populate the owners of - Returns: - Project object with owners populated - """ - - if project_object.namespace.kind == 'group': - group_members = gitlab.get_group_members(project_object.namespace.id) - owners = find_group_owners(group_members) - if owners: - owner_list = [] - for owner in owners: - owner_list.append(user.create_from_dict(owner)) - project_object.namespace.members = owners - project_object.namespace.owner = None - elif project_object.namespace.kind == 'user': - namespace_user = gitlab.get_user_by_username(project_object.namespace.full_path) - if namespace_user: - project_object.namespace.owner = user.create_from_dict(namespace_user[0]) - project_object.namespace.members = None - - return project_object - - -def _blob_worker(gitlab: GitLabAPIClient, - blob_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool, - **kwargs) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of blobs to find matches against the regex - - Args: - gitlab: GitLab API object - blob_list: List of blobs to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose logging or not - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for b in blob_list: - blob_object = blob.create_from_dict(b) - project_object = project.create_from_dict(gitlab.get_project(blob_object.project_id)) - file_object = file.create_from_dict(gitlab.get_file(blob_object.project_id, blob_object.path, blob_object.ref)) - if file_object: - commit_object = 
commit.create_from_dict( - gitlab.get_commit(blob_object.project_id, file_object.commit_id)) - if _convert_time(commit_object.committed_date) > (now - timeframe) and regex.search(str(blob_object.data)): - match_string = regex.search(str(blob_object.data)).group(0) - if not verbose: - setattr(blob_object, 'data', None) - results.append({ - 'match_string': match_string, - 'blob': blob_object, - 'commit': commit_object, - 'project': _populate_project_owners(gitlab, project_object), - 'file': file_object - }) - return results - - -def _wiki_blob_worker(gitlab: GitLabAPIClient, - blob_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of wiki_blobs to find matches against the regex - - Args: - gitlab: GitLab API object - blob_list: List of wiki_blobs to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose logging or not - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for wb in blob_list: - wikiblob_object = wiki_blob.create_from_dict(wb) - project_object = project.create_from_dict(gitlab.get_project(wikiblob_object.project_id)) - if _convert_time(project_object.last_activity_at) > (now - timeframe) and regex.search( - str(wikiblob_object.data)): - match_string = regex.search(str(wikiblob_object.data)).group(0) - if not verbose: - setattr(wikiblob_object, 'data', None) - results.append({ - 'match_string': match_string, - 'wiki_blob': wikiblob_object, - 'project': _populate_project_owners(gitlab, project_object), - }) - - return results - - -def _commit_worker(gitlab: GitLabAPIClient, - commit_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of commits to find matches against the regex - - 
Args: - gitlab: GitLab API object - commit_list: List of commits to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose searching or not - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - - for c in commit_list: - commit_object = commit.create_from_dict(c) - if _convert_time(commit_object.committed_date) > (now - timeframe) and \ - regex.search(str(commit_object.message)): - project_object = project.create_from_dict(gitlab.get_project(commit_object.project_id)) - results.append({ - 'match_string': regex.search(str(commit_object.message)).group(0), - 'commit': commit_object, - 'project': _populate_project_owners(gitlab, project_object) - }) - - return results - - -def _issue_worker(gitlab: GitLabAPIClient, - issue_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of issues to find matches against the regex - - Args: - gitlab: GitLab API object - issue_list: List of issues to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose logging - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for i in issue_list: - issue_object = issue.create_from_dict(i) - if _convert_time(issue_object.updated_at) > (now - timeframe) and \ - regex.search(str(issue_object.description)): - match_string = regex.search(str(issue_object.description)).group(0) - if not verbose: - setattr(issue_object, 'description', None) - project_object = project.create_from_dict(gitlab.get_project(issue_object.project_id)) - results.append({ - 'match_string': match_string, - 'issue': issue_object, - 'project': _populate_project_owners(gitlab, project_object) - }) - - return 
results - - -def _milestone_worker(gitlab: GitLabAPIClient, - milestone_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of milestones to find matches against the regex - - Args: - gitlab: GitLab API object - milestone_list: List of milestones to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose logging - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for m in milestone_list: - milestone_object = milestone.create_from_dict(m) - if _convert_time(milestone_object.updated_at) > (now - timeframe) and \ - regex.search(str(milestone_object.description)): - project_object = project.create_from_dict(gitlab.get_project(milestone_object.project_id)) - match_string = regex.search(str(milestone_object.description)).group(0) - if not verbose: - setattr(milestone_object, 'description', None) - results.append({ - 'match_string': match_string, - 'milestone': milestone_object, - 'project': _populate_project_owners(gitlab, project_object) - }) - - return results - - -def _merge_request_worker(gitlab: GitLabAPIClient, - merge_request_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of merge requests to find matches against the regex - - Args: - gitlab: GitLab API object - merge_request_list: List of merge requests to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose logging - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for mr in merge_request_list: - mr_object = merge_request.create_from_dict(mr) - if 
_convert_time(mr_object.updated_at) > (now - timeframe) and \ - regex.search(str(mr_object.description)): - project_object = project.create_from_dict(gitlab.get_project(mr_object.project_id)) - match_string = regex.search(str(mr_object.description)).group(0) - if not verbose: - setattr(mr_object, 'description', None) - results.append({ - 'match_string': match_string, - 'merge_request': mr_object, - 'project': _populate_project_owners(gitlab, project_object) - }) - - return results - - -def _note_worker(gitlab_object: GitLabAPIClient, - note_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of notes to find matches against the regex - - Args: - note_list: List of notes to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - verbose: Whether to use verbose logging - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for n in note_list: - note_object = note.create_from_dict(n) - if _convert_time(note_object.created_at) > (now - timeframe) and \ - regex.search(str(note_object.body)): - match_string = regex.search(str(note_object.body)).group(0) - results.append({ - 'note': note_object, - 'match_string': match_string - }) - - return results - - -def _snippet_worker(gitlab_object: GitLabAPIClient, - snippet_list: List[Dict], - regex: re.Pattern, - timeframe: int, - results: List, - verbose: bool) -> List[Dict]: - """ MULTIPROCESSING WORKER - Iterates through a list of snippets to find matches against the regex - - Args: - snippet_list: List of notes to process - regex: Regex pattern to search for - timeframe: Timeframe to search in - results: List of output results - Returns: - Multiprocessing list to be combined by the parent process - """ - - now = calendar.timegm(time.gmtime()) - for s in snippet_list: - snippet_object = 
snippet.create_from_dict(s) - if _convert_time(snippet_object.created_at) > (now - timeframe) and \ - (regex.search(str(snippet_object.title)) or regex.search(str(snippet_object.description))): - if regex.search(str(snippet_object.title)): - match_string = regex.search(str(snippet_object.title)).group(0) - else: - match_string = regex.search(str(snippet_object.description)).group(0) - - if not verbose: - setattr(snippet_object, 'description', None) - results.append({ - 'snippet': snippet_object, - 'match_string': match_string - }) - - return results diff --git a/src/gitlab_watchman/gw_logger.py b/src/gitlab_watchman/loggers.py similarity index 84% rename from src/gitlab_watchman/gw_logger.py rename to src/gitlab_watchman/loggers.py index cdd3bf8..eb714a0 100644 --- a/src/gitlab_watchman/gw_logger.py +++ b/src/gitlab_watchman/loggers.py @@ -7,26 +7,37 @@ import re import traceback import csv +import urllib.parse from logging import Logger from typing import Any, Dict, List, ClassVar, Protocol from colorama import Fore, Back, Style, init +from gitlab_watchman.utils import EnhancedJSONEncoder + class StdoutLogger: + """ Class to log to stdout """ def __init__(self, **kwargs): self.debug = kwargs.get('debug') self.print_header() init() + # pylint: disable=too-many-branches def log(self, - mes_type: str, + msg_level: str, message: Any, **kwargs) -> None: + """ Log to stdout + + Args: + msg_level: Level message to log + message: Message data to log + """ notify_type = kwargs.get('notify_type') scope = kwargs.get('scope') - if not self.debug and mes_type == 'DEBUG': + if not self.debug and msg_level == 'DEBUG': return if dataclasses.is_dataclass(message): @@ -41,7 +52,7 @@ def log(self, f' URL: {message.get("kas").get("externalUrl")} \n'\ f' VERSION: {message.get("kas").get("version")} \n' \ f' ENTERPRISE: {message.get("enterprise")}' - mes_type = 'INSTANCE' + msg_level = 'INSTANCE' if notify_type == "user": message = f'USER: \n' \ f' ID: {message.get("id")} \n' \ @@ 
-54,7 +65,7 @@ def log(self, f' CAN_CREATE_GROUP: {message.get("can_create_group")} \n'\ f' CAN_CREATE_PROJECT: {message.get("can_create_project")} \n' \ f' 2FA_ENABLED: {message.get("two_factor_enabled")}' - mes_type = 'USER' + msg_level = 'USER' if notify_type == "token": message = f'PERSONAL_ACCESS_TOKEN: \n' \ f' ID: {message.get("id")} \n' \ @@ -65,13 +76,13 @@ def log(self, f' LAST_USED_AT: {message.get("last_used_at")} \n' \ f' ACTIVE: {message.get("active")} \n'\ f' EXPIRY: {message.get("expires_at", "Never")}' - mes_type = 'WARNING' + msg_level = 'WARNING' if notify_type == "result": if scope == 'blobs': message = 'SCOPE: Blob' \ - f' AUTHOR: {message.get("commit").get("author_name")} - ' \ - f'{message.get("commit").get("author_email")}' \ f' COMMITTED: {message.get("commit").get("committed_date")} \n' \ + f' AUTHOR: {message.get("commit").get("author_name")} ' \ + f'EMAIL: {message.get("commit").get("author_email")}\n' \ f' FILENAME: {message.get("blob").get("basename")} \n' \ f' URL: {message.get("project").get("web_url")}/-/blob/{message.get("blob").get("ref")}/' \ f'{message.get("blob").get("filename")} \n' \ @@ -100,10 +111,26 @@ def log(self, f' POTENTIAL_SECRET: {message.get("match_string")} \n' \ f' -----' elif scope == 'wiki_blobs': + if message.get('project_wiki'): + wiki_path = (f'{message.get("project").get("web_url")}/-/wikis/' + f'{urllib.parse.quote_plus(message.get("wiki_blob").get("path"))}') + elif message.get('group_wiki'): + wiki_path = (f'{message.get("group").get("web_url")}/-/wikis/' + f'{urllib.parse.quote_plus(message.get("wiki_blob").get("path"))}') + else: + wiki_path = 'N/A' + + if message.get('project_wiki'): + wiki_type = 'Project Wiki' + elif message.get('group_wiki'): + wiki_type = 'Group Wiki' + else: + wiki_type = '???' 
+ message = 'SCOPE: Wiki Blob' \ f' FILENAME: {message.get("wiki_blob").get("filename")} \n' \ - f' URL: {message.get("project").get("web_url")}/-/wikis/' \ - f'{message.get("wiki_blob").get("basename")} \n' \ + f' WIKI_TYPE: {wiki_type} \n' \ + f' URL: {wiki_path} \n' \ f' POTENTIAL_SECRET: {message.get("match_string")} \n' \ f' -----' elif scope == 'issues': @@ -126,12 +153,12 @@ def log(self, f' URL: {message.get("snippet").get("web_url")} \n' \ f' POTENTIAL_SECRET: {message.get("match_string")} \n' \ f' -----' - mes_type = 'RESULT' + msg_level = 'RESULT' try: - self.log_to_stdout(message, mes_type) + self.log_to_stdout(message, msg_level) except Exception as e: print(e) - self.log_to_stdout(message, mes_type) + self.log_to_stdout(message, msg_level) def log_to_stdout(self, message: Any, @@ -207,7 +234,7 @@ def log_to_stdout(self, type_colorer = re.compile(r'([A-Z]{3,})', re.VERBOSE) mes_type = type_colorer.sub(high_color + r'\1' + base_color, mes_type.lower()) # Make header words coloured - header_words = re.compile('([A-Z_0-9]{2,}:)\s', re.VERBOSE) + header_words = re.compile(r'([A-Z_0-9]{2,}:)\s', re.VERBOSE) message = header_words.sub(key_color + Style.BRIGHT + r'\1 ' + Fore.WHITE + Style.NORMAL, str(message)) sys.stdout.write( f"{reset_all}{style}[{base_color}{mes_type}{Fore.WHITE}]{style} {message}{Fore.WHITE}{Style.NORMAL}\n") @@ -217,7 +244,10 @@ def log_to_stdout(self, sys.exit(1) print('Formatting error') - def print_header(self) -> None: + @staticmethod + def print_header() -> None: + """ Prints the header for the logger""" + print(" ".ljust(79) + Style.BRIGHT) print(Fore.LIGHTRED_EX + Style.BRIGHT + @@ -245,14 +275,8 @@ def print_header(self) -> None: print(' '.ljust(79) + Fore.GREEN) -class EnhancedJSONEncoder(json.JSONEncoder): - def default(self, o): - if dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - return super().default(o) - - class JSONLogger(Logger): + """ Custom logger class for JSON logging""" def __init__(self, name: str = 
'gitlab_watchman', **kwargs): super().__init__(name) self.notify_format = logging.Formatter( @@ -278,13 +302,13 @@ def __init__(self, name: str = 'gitlab_watchman', **kwargs): def log(self, level: str, - log_data: str or Dict, + msg: str or Dict, **kwargs): if level.upper() == 'NOTIFY': self.handler.setFormatter(self.notify_format) self.logger.info( json.dumps( - log_data, + msg, cls=EnhancedJSONEncoder), extra={ 'scope': kwargs.get('scope', ''), @@ -292,32 +316,33 @@ def log(self, 'severity': kwargs.get('severity', '')}) elif level.upper() == 'INFO': self.handler.setFormatter(self.info_format) - self.logger.info(json.dumps(log_data)) + self.logger.info(json.dumps(msg)) elif level.upper() == 'DEBUG': self.handler.setFormatter(self.info_format) - self.logger.info(json.dumps(log_data)) + self.logger.info(json.dumps(msg)) elif level.upper() == 'SUCCESS': self.handler.setFormatter(self.success_format) - self.logger.info(json.dumps(log_data)) + self.logger.info(json.dumps(msg)) elif level.upper() == 'INSTANCE': self.handler.setFormatter(self.instance_format) - self.logger.info(json.dumps(log_data)) + self.logger.info(json.dumps(msg)) elif level.upper() == 'USER': self.handler.setFormatter(self.user_format) - self.logger.info(json.dumps(log_data)) + self.logger.info(json.dumps(msg)) elif level.upper() == 'TOKEN': self.handler.setFormatter(self.token_format) - self.logger.info(json.dumps(log_data)) + self.logger.info(json.dumps(msg)) else: self.handler.setFormatter(self.info_format) - self.logger.critical(log_data) + self.logger.critical(msg) +# pylint: disable=missing-class-docstring class IsDataclass(Protocol): __dataclass_fields__: ClassVar[Dict] -def export_csv(csv_name: str, export_data: List[IsDataclass]) -> None: +def log_to_csv(csv_name: str, export_data: List[IsDataclass]) -> None: """ Export the data passed in a dataclass to CSV file Args: @@ -334,3 +359,18 @@ def export_csv(csv_name: str, export_data: List[IsDataclass]) -> None: f.close() except Exception as e: 
print(e) + + +def init_logger(logging_type: str, debug: bool) -> JSONLogger | StdoutLogger: + """ Create a logger object. Defaults to stdout if no option is given + + Args: + logging_type: Type of logging to use + debug: Whether to use debug level logging or not + Returns: + Logger object + """ + + if not logging_type or logging_type == 'stdout': + return StdoutLogger(debug=debug) + return JSONLogger(debug=debug) diff --git a/src/gitlab_watchman/models/blob.py b/src/gitlab_watchman/models/blob.py index bd45ee6..d120549 100644 --- a/src/gitlab_watchman/models/blob.py +++ b/src/gitlab_watchman/models/blob.py @@ -2,7 +2,7 @@ @dataclass(slots=True) -class Blob(object): +class Blob: """ Class that defines Blob objects for GitLab blobs""" basename: str diff --git a/src/gitlab_watchman/models/commit.py b/src/gitlab_watchman/models/commit.py index 3d97583..357b682 100644 --- a/src/gitlab_watchman/models/commit.py +++ b/src/gitlab_watchman/models/commit.py @@ -1,20 +1,24 @@ from dataclasses import dataclass +from datetime import datetime + +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Commit(object): +# pylint: disable=too-many-instance-attributes +class Commit: """ Class that defines File objects for GitLab files""" id: str - created_at: str + created_at: datetime | None title: str message: str author_name: str author_email: str - authored_date: str + authored_date: datetime | None committer_name: str committer_email: str - committed_date: str + committed_date: datetime | None web_url: str status: str project_id: str @@ -26,19 +30,19 @@ def create_from_dict(commit_dict: dict) -> Commit: Args: commit_dict: dict/JSON format data from GitLab API Returns: - A new Note object + A new Commit object """ return Commit( id=commit_dict.get('id'), - created_at=commit_dict.get('created_at'), + created_at=convert_to_utc_datetime(commit_dict.get('created_at')), title=commit_dict.get('title'), message=commit_dict.get('message'), 
author_name=commit_dict.get('author_name'), author_email=commit_dict.get('author_email'), - authored_date=commit_dict.get('authored_date'), + authored_date=convert_to_utc_datetime(commit_dict.get('authored_date')), committer_name=commit_dict.get('committer_name'), - committed_date=commit_dict.get('committed_date'), + committed_date=convert_to_utc_datetime(commit_dict.get('committed_date')), committer_email=commit_dict.get('committer_email'), web_url=commit_dict.get('web_url'), status=commit_dict.get('status'), diff --git a/src/gitlab_watchman/models/file.py b/src/gitlab_watchman/models/file.py index 5129251..6d5fcde 100644 --- a/src/gitlab_watchman/models/file.py +++ b/src/gitlab_watchman/models/file.py @@ -2,7 +2,7 @@ @dataclass(slots=True) -class File(object): +class File: """ Class that defines File objects for GitLab files""" file_name: str diff --git a/src/gitlab_watchman/models/group.py b/src/gitlab_watchman/models/group.py index 7c143a6..c451da1 100644 --- a/src/gitlab_watchman/models/group.py +++ b/src/gitlab_watchman/models/group.py @@ -1,8 +1,12 @@ from dataclasses import dataclass +from datetime import datetime + +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Group(object): +# pylint: disable=too-many-instance-attributes +class Group: """ Class that defines User objects for GitLab groups""" id: str @@ -17,7 +21,7 @@ class Group(object): request_access_enabled: bool full_name: str full_path: str - created_at: str + created_at: datetime | None web_url: str ip_restriction_ranges: str @@ -28,7 +32,7 @@ def create_from_dict(group_dict: dict) -> Group: Args: group_dict: dict/JSON format data from GitLab API Returns: - A new Project object + A new Group object """ return Group( @@ -44,7 +48,7 @@ def create_from_dict(group_dict: dict) -> Group: request_access_enabled=group_dict.get('request_access_enabled'), full_name=group_dict.get('full_name'), full_path=group_dict.get('full_path'), - 
created_at=group_dict.get('created_at'), + created_at=convert_to_utc_datetime(group_dict.get('created_at')), web_url=group_dict.get('web_url'), ip_restriction_ranges=group_dict.get('ip_restriction_ranges') ) diff --git a/src/gitlab_watchman/models/issue.py b/src/gitlab_watchman/models/issue.py index 967d555..dcc326a 100644 --- a/src/gitlab_watchman/models/issue.py +++ b/src/gitlab_watchman/models/issue.py @@ -1,10 +1,13 @@ from dataclasses import dataclass +from datetime import datetime -from . import user +from gitlab_watchman.models import user +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Issue(object): +# pylint: disable=too-many-instance-attributes +class Issue: """ Class that defines Issues objects for GitLab issues""" id: str @@ -13,10 +16,10 @@ class Issue(object): title: str description: str state: str - created_at: str - updated_at: str - closed_by: user.User - closed_at: str + created_at: datetime | None + updated_at: datetime | None + closed_by: user.User | None + closed_at: datetime | None author: str type: str author: user.User @@ -37,6 +40,11 @@ def create_from_dict(issue_dict: dict) -> Issue: else: closed_by = None + if issue_dict.get('author'): + author = user.create_from_dict(issue_dict.get('author')) + else: + author = None + return Issue( id=issue_dict.get('id'), iid=issue_dict.get('iid'), @@ -44,12 +52,12 @@ def create_from_dict(issue_dict: dict) -> Issue: title=issue_dict.get('title'), description=issue_dict.get('description'), state=issue_dict.get('state'), - created_at=issue_dict.get('created_at'), - updated_at=issue_dict.get('updated_at'), + created_at=convert_to_utc_datetime(issue_dict.get('created_at')), + updated_at=convert_to_utc_datetime(issue_dict.get('updated_at')), closed_by=closed_by, - closed_at=issue_dict.get('closed_at'), + closed_at=convert_to_utc_datetime(issue_dict.get('closed_at')), type=issue_dict.get('type'), - author=user.create_from_dict(issue_dict.get('author')), + 
author=author, confidential=issue_dict.get('confidential'), web_url=issue_dict.get('web_url'), ) diff --git a/src/gitlab_watchman/models/merge_request.py b/src/gitlab_watchman/models/merge_request.py index d022a9b..d51b6b8 100644 --- a/src/gitlab_watchman/models/merge_request.py +++ b/src/gitlab_watchman/models/merge_request.py @@ -1,10 +1,13 @@ from dataclasses import dataclass +from datetime import datetime -from . import user +from gitlab_watchman.models import user +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class MergeRequest(object): +# pylint: disable=too-many-instance-attributes +class MergeRequest: """ Class that defines MergeRequest objects for GitLab merge requests""" id: str @@ -13,13 +16,13 @@ class MergeRequest(object): title: str description: str state: str - created_at: str - updated_at: str + created_at: datetime | None + updated_at: datetime | None merged_by: user.User - merged_at: str + merged_at: datetime | None target_branch: str source_branch: str - author: user.User + author: user.User | None source_project_id: str target_project_id: str merge_status: str @@ -39,6 +42,11 @@ def create_from_dict(mr_dict: dict) -> MergeRequest: else: merged_by = None + if mr_dict.get('author'): + author = user.create_from_dict(mr_dict.get('author')) + else: + author = None + return MergeRequest( id=mr_dict.get('id'), iid=mr_dict.get('iid'), @@ -46,13 +54,13 @@ def create_from_dict(mr_dict: dict) -> MergeRequest: title=mr_dict.get('title'), description=mr_dict.get('description'), state=mr_dict.get('state'), - created_at=mr_dict.get('created_at'), - updated_at=mr_dict.get('updated_at'), + created_at=convert_to_utc_datetime(mr_dict.get('created_at')), + updated_at=convert_to_utc_datetime(mr_dict.get('updated_at')), merged_by=merged_by, - merged_at=mr_dict.get('merged_at'), + merged_at=convert_to_utc_datetime(mr_dict.get('merged_at')), target_branch=mr_dict.get('target_branch'), source_branch=mr_dict.get('source_branch'), - 
author=user.create_from_dict(mr_dict.get('author')), + author=author, source_project_id=mr_dict.get('source_project_id'), target_project_id=mr_dict.get('target_project_id'), merge_status=mr_dict.get('merge_status'), diff --git a/src/gitlab_watchman/models/milestone.py b/src/gitlab_watchman/models/milestone.py index 79410c6..9a626ba 100644 --- a/src/gitlab_watchman/models/milestone.py +++ b/src/gitlab_watchman/models/milestone.py @@ -1,8 +1,12 @@ from dataclasses import dataclass +from datetime import datetime + +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Milestone(object): +# pylint: disable=too-many-instance-attributes +class Milestone: """ Class that defines Milestone objects for GitLab milestones""" id: str @@ -11,10 +15,10 @@ class Milestone(object): title: str description: str state: str - created_at: str - updated_at: str - due_date: str - start_date: str + created_at: datetime | None + updated_at: datetime | None + due_date: datetime | None + start_date: datetime | None expired: str web_url: str @@ -34,10 +38,10 @@ def create_from_dict(milestone_dict: dict) -> Milestone: title=milestone_dict.get('title'), description=milestone_dict.get('description'), state=milestone_dict.get('state'), - created_at=milestone_dict.get('created_at'), - updated_at=milestone_dict.get('updated_at'), - due_date=milestone_dict.get('due_date'), - start_date=milestone_dict.get('start_date'), + created_at=convert_to_utc_datetime(milestone_dict.get('created_at')), + updated_at=convert_to_utc_datetime(milestone_dict.get('updated_at')), + due_date=convert_to_utc_datetime(milestone_dict.get('due_date')), + start_date=convert_to_utc_datetime(milestone_dict.get('start_date')), expired=milestone_dict.get('expired'), web_url=milestone_dict.get('web_url'), project_id=milestone_dict.get('project_id') diff --git a/src/gitlab_watchman/models/note.py b/src/gitlab_watchman/models/note.py index 4b677ee..7e9cfa5 100644 --- 
a/src/gitlab_watchman/models/note.py +++ b/src/gitlab_watchman/models/note.py @@ -1,10 +1,13 @@ from dataclasses import dataclass +from datetime import datetime -from . import user +from gitlab_watchman.models import user +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Note(object): +# pylint: disable=too-many-instance-attributes +class Note: """ Class that defines User objects for GitLab notes""" id: str @@ -12,15 +15,15 @@ class Note(object): body: str attachment: str or bool author: user.User - created_at: str - updated_at: str + created_at: datetime | None + updated_at: datetime | None system: str noteable_id: str noteable_type: str commit_id: str resolvable: bool resolved_by: user.User - resolved_at: str + resolved_at: datetime | None confidential: str noteable_iid: str command_changes: str @@ -39,21 +42,26 @@ def create_from_dict(note_dict: dict) -> Note: else: resolved_by = None + if note_dict.get('author'): + author = user.create_from_dict(note_dict.get('author', {})) + else: + author = None + return Note( id=note_dict.get('id'), type=note_dict.get('type'), body=note_dict.get('body'), attachment=note_dict.get('attachment'), - author=user.create_from_dict(note_dict.get('author', {})), - created_at=note_dict.get('created_at'), - updated_at=note_dict.get('updated_at'), + author=author, + created_at=convert_to_utc_datetime(note_dict.get('created_at')), + updated_at=convert_to_utc_datetime(note_dict.get('updated_at')), system=note_dict.get('system'), noteable_id=note_dict.get('noteable_id'), noteable_type=note_dict.get('noteable_type'), commit_id=note_dict.get('commit_id'), resolvable=note_dict.get('resolvable'), resolved_by=resolved_by, - resolved_at=note_dict.get('resolved_at'), + resolved_at=convert_to_utc_datetime(note_dict.get('resolved_at')), confidential=note_dict.get('confidential'), noteable_iid=note_dict.get('noteable_iid'), command_changes=note_dict.get('command_changes'), diff --git 
a/src/gitlab_watchman/models/project.py b/src/gitlab_watchman/models/project.py index d2dada5..2f79006 100644 --- a/src/gitlab_watchman/models/project.py +++ b/src/gitlab_watchman/models/project.py @@ -1,11 +1,14 @@ +import datetime from dataclasses import dataclass from typing import List -from . import user +from gitlab_watchman.models import user +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Namespace(object): +class Namespace: + """ Class that defines Namespace objects for GitLab Projects""" id: str name: str path: str @@ -18,7 +21,7 @@ class Namespace(object): @dataclass(slots=True) -class Project(object): +class Project: """ Class that defines User objects for GitLab projects""" id: str @@ -27,9 +30,9 @@ class Project(object): name_with_namespace: str path: str path_with_namespace: str - created_at: str + created_at: datetime.datetime | None web_url: user.User - last_activity_at: str + last_activity_at: datetime.datetime | None namespace: Namespace @@ -48,19 +51,19 @@ def create_from_dict(project_dict: dict) -> Project: name=project_dict.get('name'), name_with_namespace=project_dict.get('name_with_namespace'), path=project_dict.get('path'), - created_at=project_dict.get('created_at'), + created_at=convert_to_utc_datetime(project_dict.get('created_at')), path_with_namespace=project_dict.get('path_with_namespace'), web_url=project_dict.get('web_url'), - last_activity_at=project_dict.get('last_activity_at'), + last_activity_at=convert_to_utc_datetime(project_dict.get('last_activity_at')), namespace=Namespace( - id=project_dict.get('namespace').get('id'), - name=project_dict.get('namespace').get('name'), - path=project_dict.get('namespace').get('path'), - kind=project_dict.get('namespace').get('kind'), - full_path=project_dict.get('namespace').get('full_path'), - parent_id=project_dict.get('namespace').get('parent_id'), - web_url=project_dict.get('namespace').get('web_url'), + id=project_dict.get('namespace', 
{}).get('id'), + name=project_dict.get('namespace', {}).get('name'), + path=project_dict.get('namespace', {}).get('path'), + kind=project_dict.get('namespace', {}).get('kind'), + full_path=project_dict.get('namespace', {}).get('full_path'), + parent_id=project_dict.get('namespace', {}).get('parent_id'), + web_url=project_dict.get('namespace', {}).get('web_url'), + members=[], - owner=[] + owner=None ) ) diff --git a/src/gitlab_watchman/models/signature.py b/src/gitlab_watchman/models/signature.py index e1c744e..e311893 100644 --- a/src/gitlab_watchman/models/signature.py +++ b/src/gitlab_watchman/models/signature.py @@ -1,68 +1,85 @@ -import pathlib -import yaml +import datetime +from typing import Any, Dict, List from dataclasses import dataclass @dataclass(slots=True) -class Signature(object): +class TestCases: + """ Class that holds test cases for a signature """ + match_cases: list + fail_cases: list + + +@dataclass(frozen=True, slots=True) +# pylint: disable=too-many-instance-attributes +class Signature: """ Class that handles loaded signature objects. Signatures - define what to search for in GitLab and where to search for it. + define what to search for in GitLab and where to search for it.
They also contain regex patterns to validate data that is found""" name: str - status: bool + id: str + status: str author: str - date: str + date: str | datetime.date | datetime.datetime version: str description: str - severity: int - watchman_apps: list - scope: list - test_cases: dataclass - search_strings: str - patterns: str - + severity: int or str + watchman_apps: Dict[str, Any] + scope: List[str] + test_cases: TestCases + search_strings: List[str] + patterns: List[str] -@dataclass(slots=True) -class TestCases(object): - match_cases: list - fail_cases: list + def __post_init__(self): + if self.name and not isinstance(self.name, str): + raise TypeError(f'Expected `name` to be of type str, received {type(self.name).__name__}') + if self.id and not isinstance(self.id, str): + raise TypeError(f'Expected `id` to be of type str, received {type(self.id).__name__}') + if self.status and not isinstance(self.status, str): + raise TypeError(f'Expected `status` to be of type str, received {type(self.status).__name__}') + if self.author and not isinstance(self.author, str): + raise TypeError(f'Expected `author` to be of type str, received {type(self.author).__name__}') + if self.date and not isinstance(self.date, (datetime.date, datetime.datetime, str)): + raise TypeError(f'Expected `date` to be of type str, received {type(self.date).__name__}') + if self.version and not isinstance(self.version, str): + raise TypeError(f'Expected `version` to be of type str, received {type(self.version).__name__}') + if self.description and not isinstance(self.description, str): + raise TypeError(f'Expected `description` to be of type str, received {type(self.description).__name__}') + if self.severity and not isinstance(self.severity, (int, str)): + raise TypeError(f'Expected `severity` to be of type int or str, received {type(self.severity).__name__}') + if self.scope and not isinstance(self.scope, list): + raise TypeError(f'Expected `scope` to be of type list, received 
{type(self.scope).__name__}') + if self.search_strings and not isinstance(self.search_strings, list): + raise TypeError( + f'Expected `search_strings` to be of type list, received {type(self.search_strings).__name__}') + if self.patterns and not isinstance(self.patterns, list): + raise TypeError(f'Expected `patterns` to be of type list, received {type(self.patterns).__name__}') -def load_from_yaml(sig_path: pathlib.PosixPath) -> list[Signature]: - """Load YAML file and return a Signature object +def create_from_dict(signature_dict: Dict[str, Any]) -> Signature: + """ Create a Signature object from a dictionary Args: - sig_path: Path of YAML file + signature_dict: dict/JSON object signature Returns: - Signature object with fields populated from the YAML - signature file + Signature """ - with open(sig_path) as yaml_file: - yaml_import = yaml.safe_load(yaml_file) - - output = [] - for sig in yaml_import.get('signatures'): - if 'gitlab' in sig.get('watchman_apps'): - output.append( - Signature( - name=sig.get('name'), - status=sig.get('status'), - author=sig.get('author'), - date=sig.get('date'), - version=sig.get('version'), - description=sig.get('description'), - severity=sig.get('severity'), - watchman_apps=sig.get('watchman_apps'), - scope=sig.get('watchman_apps').get('gitlab').get('scope'), - test_cases=TestCases( - match_cases=sig.get('test_cases').get('match_cases'), - fail_cases=sig.get('test_cases').get('fail_cases') - ), - search_strings=sig.get('watchman_apps').get('gitlab').get('search_strings'), - patterns=sig.get('patterns') - ) - ) - - return output \ No newline at end of file + return Signature( + name=signature_dict.get('name'), + id=signature_dict.get('id'), + status=signature_dict.get('status'), + author=signature_dict.get('author'), + date=signature_dict.get('date'), + version=signature_dict.get('version'), + description=signature_dict.get('description'), + severity=signature_dict.get('severity'), + 
watchman_apps=signature_dict.get('watchman_apps'), + scope=signature_dict.get('watchman_apps', {}).get('gitlab', {}).get('scope'), + test_cases=TestCases( + match_cases=signature_dict.get('test_cases', {}).get('match_cases'), + fail_cases=signature_dict.get('test_cases', {}).get('fail_cases') + ), + search_strings=signature_dict.get('watchman_apps', {}).get('gitlab', {}).get('search_strings'), + patterns=signature_dict.get('patterns')) diff --git a/src/gitlab_watchman/models/snippet.py b/src/gitlab_watchman/models/snippet.py index 6b72156..bfce73c 100644 --- a/src/gitlab_watchman/models/snippet.py +++ b/src/gitlab_watchman/models/snippet.py @@ -1,29 +1,32 @@ from dataclasses import dataclass +from datetime import datetime +from typing import List -from . import user +from gitlab_watchman.models import user +from gitlab_watchman.utils import convert_to_utc_datetime @dataclass(slots=True) -class Snippet(object): +class File: + """ Class that defines File objects for GitLab snippets""" + path: str + raw_url: str + + +@dataclass(slots=True) +class Snippet: """ Class that defines User objects for GitLab snippets""" id: str title: str description: str visibility: str or bool - created_at: str - updated_at: str + created_at: datetime | None + updated_at: datetime | None web_url: str author: user.User file_name: str - files: list - - -@dataclass(slots=True) -class File(object): - - path: str - raw_url: str + files: List[File] def create_from_dict(snip_dict: dict) -> Snippet: @@ -44,14 +47,19 @@ def create_from_dict(snip_dict: dict) -> Snippet: else: file_list = None + if snip_dict.get('author'): + author = user.create_from_dict(snip_dict.get('author')) + else: + author = None + return Snippet( id=snip_dict.get('id'), title=snip_dict.get('title'), description=snip_dict.get('description'), visibility=snip_dict.get('visibility'), - author=user.create_from_dict(snip_dict.get('author', {})), - created_at=snip_dict.get('created_at'), - updated_at=snip_dict.get('updated_at'), + 
author=author, + created_at=convert_to_utc_datetime(snip_dict.get('created_at')), + updated_at=convert_to_utc_datetime(snip_dict.get('updated_at')), web_url=snip_dict.get('web_url'), file_name=snip_dict.get('file_name'), files=file_list diff --git a/src/gitlab_watchman/models/user.py b/src/gitlab_watchman/models/user.py index 970d206..dd4cf38 100644 --- a/src/gitlab_watchman/models/user.py +++ b/src/gitlab_watchman/models/user.py @@ -2,7 +2,7 @@ @dataclass(slots=True) -class User(object): +class User: """ Class that defines User objects for GitLab users""" id: str diff --git a/src/gitlab_watchman/models/wiki_blob.py b/src/gitlab_watchman/models/wiki_blob.py index 1c4d424..ddeeb91 100644 --- a/src/gitlab_watchman/models/wiki_blob.py +++ b/src/gitlab_watchman/models/wiki_blob.py @@ -2,7 +2,7 @@ @dataclass(slots=True) -class WikiBlob(object): +class WikiBlob: """ Class that defines WikiBlob objects for GitLab blobs""" basename: str @@ -11,7 +11,8 @@ class WikiBlob(object): filename: str id: str ref: str - project_id: str + project_id: str | None + group_id: str | None def create_from_dict(blob_dict: dict) -> WikiBlob: @@ -30,5 +31,6 @@ def create_from_dict(blob_dict: dict) -> WikiBlob: path=blob_dict.get('path'), filename=blob_dict.get('filename'), ref=blob_dict.get('ref'), - project_id=blob_dict.get('project_id') + project_id=blob_dict.get('project_id'), + group_id=blob_dict.get('group_id', None) ) diff --git a/src/gitlab_watchman/signature_downloader.py b/src/gitlab_watchman/signature_downloader.py new file mode 100644 index 0000000..b2cccb4 --- /dev/null +++ b/src/gitlab_watchman/signature_downloader.py @@ -0,0 +1,83 @@ +import io +import os +import sys +import traceback +import zipfile +from urllib.request import urlopen +from typing import List + +import yaml + +from gitlab_watchman.loggers import JSONLogger, StdoutLogger +from gitlab_watchman.models.signature import Signature, create_from_dict + +SIGNATURE_URL = 
'https://github.com/PaperMtn/watchman-signatures/archive/main.zip' + + +class SignatureDownloader: + """ A class for downloading and processing signature files from a GitHub repository. """ + def __init__(self, logger: JSONLogger | StdoutLogger): + """ Initializes a SignatureDownloader object. + + Args: + logger (JSONLogger | StdoutLogger): The logger object to use for logging. + Returns: + None + """ + self.logger = logger + + def download_signatures(self) -> List[Signature]: + """ Download signatures from GitHub repository + + Returns: + List of downloaded Signature objects + """ + + try: + response = urlopen(SIGNATURE_URL) + signatures_zip_file = zipfile.ZipFile(io.BytesIO(response.read())) + signature_files = {} + signature_objects = [] + for file_path in signatures_zip_file.namelist(): + if file_path.endswith('/'): + continue + + signature_name = os.path.basename(file_path) + self.logger.log('DEBUG', f'Processing {file_path} ...') + + with signatures_zip_file.open(file_path) as source: + signature_files[signature_name] = source.read() + + if file_path.endswith('.yaml'): + signature_objects.append(self._process_signature(signature_files[signature_name])) + self.logger.log('SUCCESS', f'Downloaded signature file: {signature_name}') + else: + self.logger.log('DEBUG', f'Skipping unrecognized file: {file_path}') + + return [item for sublist in signature_objects for item in sublist] + + except Exception as e: + self.logger.log('CRITICAL', f'Error while processing the signature' + f' files from the download package: {e}') + self.logger.log('DEBUG', traceback.format_exc()) + sys.exit(1) + + @staticmethod + def _process_signature(signature_data: bytes) -> List[Signature]: + """ Process a signature data bytes object into a list of Signature objects. + + This function takes a bytes object containing signature data, parses it into a dictionary, + and then creates a list of Signature objects based on the parsed data. 
+ + Args: + signature_data (bytes): A bytes object containing signature data. + Returns: + List[Signature]: A list of Signature objects created from the parsed signature data. + """ + + signature_dict = yaml.safe_load(io.StringIO(signature_data.decode('utf-8'))) + output = [] + for sig in signature_dict.get('signatures'): + if 'gitlab' in sig.get('watchman_apps') and sig.get('status') == 'enabled': + output.append(create_from_dict(sig)) + return output diff --git a/src/gitlab_watchman/signature_updater.py b/src/gitlab_watchman/signature_updater.py deleted file mode 100644 index faaeb5f..0000000 --- a/src/gitlab_watchman/signature_updater.py +++ /dev/null @@ -1,82 +0,0 @@ -import io -import os -import zipfile -import shutil -import sys -from datetime import datetime -from pathlib import Path -from urllib.request import urlopen - -from . import gw_logger - -SIGNATURE_URL = 'https://github.com/PaperMtn/watchman-signatures/archive/main.zip' - - -class SignatureUpdater(object): - def __init__(self, logger: gw_logger.JSONLogger): - self.application_path = str((Path(__file__).parents[2]).resolve()) - self.logger = logger - - def update_signatures(self): - - response = urlopen(SIGNATURE_URL) - - try: - sig_dir = os.path.join(self.application_path, 'watchman-signatures/') - for sub_directory in [ - '', - 'config_files', - 'competitive', - 'compliance', - 'tokens_and_credentials' - ]: - full_path = os.path.join(sig_dir, sub_directory) - if not os.path.exists(full_path): - os.makedirs(full_path) - except Exception as e: - self.logger.log('CRITICAL', 'Error while creating the signature-base directories') - sys.exit(1) - - try: - signatures_zip_file = zipfile.ZipFile(io.BytesIO(response.read())) - for file_path in signatures_zip_file.namelist(): - signature_name = os.path.basename(file_path) - if file_path.endswith('/'): - continue - - self.logger.log('DEBUG', f'Extracting {file_path} ...') - if '/competitive/' in file_path and file_path.endswith('.yaml'): - target_file = 
os.path.join(sig_dir, 'competitive', signature_name) - elif '/compliance/' in file_path and file_path.endswith('.yaml'): - target_file = os.path.join(sig_dir, 'compliance', signature_name) - elif '/config_files/' in file_path and file_path.endswith('.yaml'): - target_file = os.path.join(sig_dir, 'config_files', signature_name) - elif file_path.endswith('.yaml'): - target_file = os.path.join(sig_dir, 'tokens_and_credentials', signature_name) - elif file_path.endswith('.yaml'): - target_file = os.path.join(sig_dir, 'misc', signature_name) - else: - continue - - if os.path.exists(target_file): - existing_modified_date = datetime.utcfromtimestamp(os.path.getmtime(target_file)) - if datetime(*signatures_zip_file.getinfo(file_path).date_time) > existing_modified_date: - self.logger.log('SUCCESS', f'Signature updated to newest version: {signature_name}') - source = signatures_zip_file.open(file_path) - target = open(target_file, 'wb') - with source, target: - shutil.copyfileobj(source, target) - target.close() - source.close() - else: - self.logger.log('SUCCESS', f'New signature file: {signature_name}') - source = signatures_zip_file.open(file_path) - target = open(target_file, 'wb') - with source, target: - shutil.copyfileobj(source, target) - target.close() - source.close() - - except Exception as e: - self.logger.log('CRITICAL', f'Error while extracting the signature files from the download package {e}') - sys.exit(1) diff --git a/src/gitlab_watchman/utils.py b/src/gitlab_watchman/utils.py new file mode 100644 index 0000000..4487926 --- /dev/null +++ b/src/gitlab_watchman/utils.py @@ -0,0 +1,103 @@ +import json +import dataclasses +from datetime import datetime +from typing import List, Dict, Any + +import pytz + + +class EnhancedJSONEncoder(json.JSONEncoder): + """ JSON Encoder that handles datetime and dataclass objects""" + def default(self, o): + if isinstance(o, datetime): + return o.isoformat() + if dataclasses.is_dataclass(o): + return dataclasses.asdict(o) + 
return super().default(o) + + +def convert_to_epoch(timestamp: str | datetime) -> int | None: + """ Convert ISO 8601 formatted strings to int epoch timestamps + + ISO 8601 formatted strings are formatted as: + YYYY-MM-DDTHH:MM:SS.SSS+HH:MM + + Args: + timestamp: ISO 8601 formatted string, example: 2024-01-01T00:00:00.000+00:00 + Returns: + int epoch timestamp + """ + + try: + if isinstance(timestamp, datetime): + return int(timestamp.timestamp()) + else: + pattern = '%Y-%m-%dT%H:%M:%S.%f%z' + return int(datetime.strptime(timestamp, pattern).timestamp()) + except TypeError: + return None + + +def convert_to_utc_datetime(timestamp: str) -> datetime | None: + """ Convert ISO 8601 formatted strings to datetime objects. + Datetimes are returned in UTC. + Accepted inputs: + ISO 8601 Datetime with Timezone: YYYY-MM-DDTHH:MM:SS.SSS+HH:MM + ISO 8601 Datetime without Timezone: YYYY-MM-DDTHH:MM:SS.SSSZ + ISO 8601 Date: YYYY-MM-DD + + Args: + timestamp: ISO 8601 formatted string + Returns: + datetime object + """ + + try: + try: + dt = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f%z') + return dt.astimezone(pytz.utc) + except ValueError: + try: + dt = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%SZ') + return dt.astimezone(pytz.utc) + except ValueError: + return datetime.strptime(timestamp, '%Y-%m-%d') + except TypeError: + return None + + +def convert_to_dict(obj: Any) -> Dict: + """ Returns a dictionary object from a dataclass object or a dict + containing nested dataclass objects. + + Args: + obj: dataclass object or dict + Returns: + Dictionary object + """ + + json_object = json.dumps(obj, sort_keys=True, cls=EnhancedJSONEncoder) + return json.loads(json_object) + + +def deduplicate_results(input_list: List[Any]) -> List[Dict]: + """ Removes duplicates where results are returned by multiple queries. This is done + using the `watchman_id` field in the detection data to identify the same findings. 
+ + The `watchman_id` is a hash that is generated for each finding from the match string, + meaning the same message won't be returned multiple times. + + Args: + input_list: List of dataclass objects + Returns: + List of JSON objects with duplicates removed + """ + + converted_dict_list = [convert_to_dict(t) for t in input_list] + return list({match.get('watchman_id'): match for match in reversed(converted_dict_list)}.values()) + + +def split_to_chunks(input_list, no_of_chunks): + """Split the input list into n amount of chunks""" + + return (input_list[i::no_of_chunks] for i in range(no_of_chunks)) diff --git a/src/gitlab_watchman/watchman_processor.py b/src/gitlab_watchman/watchman_processor.py new file mode 100644 index 0000000..790c77b --- /dev/null +++ b/src/gitlab_watchman/watchman_processor.py @@ -0,0 +1,563 @@ +import calendar +import multiprocessing +import re +import time +import traceback +import hashlib +from multiprocessing import Queue +from dataclasses import dataclass +from typing import List, Dict, Optional + +from requests.exceptions import SSLError + +from gitlab_watchman.clients.gitlab_client import GitLabAPIClient +from gitlab_watchman.exceptions import GitLabWatchmanAuthenticationError +from gitlab_watchman.loggers import JSONLogger, StdoutLogger, init_logger +from gitlab_watchman.models import ( + signature, + note, + snippet, + blob, + wiki_blob, + file, + commit, + user, + merge_request, + milestone, + issue, + project, + group +) +from gitlab_watchman.utils import ( + convert_to_epoch, + deduplicate_results, + split_to_chunks +) + +ALL_TIME = calendar.timegm(time.gmtime()) + 1576800000 + + +@dataclass +class WorkerArgs: + """ Dataclass for multiprocessing arguments """ + gitlab_client: GitLabAPIClient + search_result_list: List[Dict] + regex: re.Pattern[str] + timeframe: int + results_list: List[Dict] + verbose: bool + log_queue: Optional[Queue] = None + log_handler: Optional[JSONLogger | StdoutLogger] = None + + +def 
def initiate_gitlab_connection(token: str,
                               url: str) -> GitLabAPIClient:
    """ Create a GitLab API client object

    Args:
        token: GitLab access token to authenticate with
        url: Base URL of the GitLab instance to connect to
    Returns:
        GitLab API client object
    Raises:
        GitLabWatchmanAuthenticationError: If an SSL error occurs while
            creating the GitLab API client object
    """

    try:
        return GitLabAPIClient(token, url)
    except SSLError as e:
        raise GitLabWatchmanAuthenticationError('SSL Error: Please check your GitLab URL and try again') from e


def find_group_owners(group_members: List[Dict]) -> List[Dict]:
    """ Return all users who are both active and group Owners

    Args:
        group_members: Members of a GitLab group
    Returns:
        List of owners of a group
    """

    # access_level 50 == Owner in the GitLab permissions model
    return [
        {
            'user_id': member.get('id'),
            'name': member.get('name'),
            'username': member.get('username'),
            'access_level': 'Owner'
        }
        for member in group_members
        if member.get('state') == 'active' and member.get('access_level') == 50
    ]


def log_listener(log_queue: Queue, logging_type: str, debug: bool):
    """ Listener for use in multiprocessing queued logging

    Consumes (level, message) tuples from the queue and forwards them to a
    logger created in this process. A None sentinel on the queue stops the
    listener.

    Args:
        log_queue: Queue object that workers put (level, message) tuples on
        logging_type: Type of logging to use
        debug: Whether to use debug level logging or not
    """
    log_handler = init_logger(logging_type, debug)
    while True:
        record = log_queue.get()
        if record is None:
            # Sentinel from the parent process - shut the listener down
            break
        level, message = record
        log_handler.log(level, message)


def _log_worker_exception(args: WorkerArgs, exc: Exception) -> None:
    """ Route a worker exception to the active logging destination

    Args:
        args: Worker arguments holding either a direct log handler or a
            multiprocessing log queue (exactly one is set by search())
        exc: Exception raised inside the worker
    """
    if args.log_handler:
        args.log_handler.log('WARNING', exc)
        args.log_handler.log('DEBUG', traceback.format_exc())
    else:
        # Queue entries MUST be (level, message) tuples: log_listener
        # unpacks each record as `level, message = record`
        args.log_queue.put(('WARNING', exc))
        args.log_queue.put(('DEBUG', traceback.format_exc()))


def search(gitlab: GitLabAPIClient,
           logging_type: str,
           log_handler: JSONLogger | StdoutLogger,
           debug: bool,
           sig: signature.Signature,
           scope: str,
           verbose: bool,
           timeframe: int = ALL_TIME) -> List[Dict] | None:
    """ Use the appropriate search function to search GitLab based on the contents
    of the signature file

    Args:
        gitlab: GitLab API object
        logging_type: Type of logging to use ('json' routes worker logs
            through a queue serviced by a dedicated listener process)
        log_handler: Logger object used for non-JSON logging
        debug: Whether to use debug level logging or not
        sig: Signature object
        scope: What sort of GitLab objects to search
        verbose: Whether to use verbose logging
        timeframe: Timeframe in seconds
    Returns:
        Deduplicated list of search results from the GitLab API, or None if
        nothing matched after filtering
    """

    results = []

    # Worker processes cannot share one logger safely when emitting JSON,
    # so their records are funnelled through a queue to a listener process
    if logging_type == 'json':
        log_queue = Queue()
        log_process = multiprocessing.Process(target=log_listener, args=(log_queue, logging_type, debug))
        log_process.start()

    def _emit_info(message: str) -> None:
        # Send an INFO record to whichever logging destination is active
        if logging_type == 'json':
            log_queue.put(('INFO', message))
        else:
            log_handler.log('INFO', message)

    def _stop_log_listener() -> None:
        # None is the sentinel that shuts down the listener process
        if logging_type == 'json':
            log_queue.put(None)
            log_process.join()

    # Map each search scope to its worker; fall back to the blob worker
    target_func_dict = {
        'blobs': _blob_worker,
        'wiki_blobs': _wiki_blob_worker,
        'commits': _commit_worker,
        'snippet_titles': _snippet_worker,
        'issues': _issue_worker,
        'milestones': _milestone_worker,
        'merge_requests': _merge_request_worker,
        'notes': _note_worker,
    }
    target_func = target_func_dict.get(scope, _blob_worker)

    for query in sig.search_strings:
        for pattern in sig.patterns:
            regex = re.compile(pattern)
            search_results = gitlab.global_search(query, search_scope=scope)
            query_formatted = query.replace('"', '')

            if not search_results:
                _emit_info(f'No {scope} found matching search term: {query_formatted}')
                continue

            _emit_info(f'{len(search_results)} {scope} '
                       f'found matching search term: {query_formatted}')

            # Shared list the worker processes append matches to
            result = multiprocessing.Manager().list()

            # Keep one core free for the parent, but always use at least one
            # chunk: cpu_count() - 1 is 0 on single-core hosts, which would
            # spawn no workers and silently return no results
            chunks = max(multiprocessing.cpu_count() - 1, 1)
            list_of_chunks = split_to_chunks(search_results, chunks)

            processes = []
            for search_list in list_of_chunks:
                multipro_args = WorkerArgs(
                    gitlab_client=gitlab,
                    search_result_list=search_list,
                    regex=regex,
                    timeframe=timeframe,
                    results_list=result,
                    verbose=verbose
                )
                if logging_type == 'json':
                    multipro_args.log_queue = log_queue
                else:
                    multipro_args.log_handler = log_handler
                p = multiprocessing.Process(target=target_func, args=(multipro_args,))
                processes.append(p)
                p.start()

            for process in processes:
                process.join()

            results.append(list(result))

    if results:
        results = deduplicate_results([item for sublist in results for item in sublist])
        _emit_info(f'{len(results)} total matches found after filtering')
        _stop_log_listener()
        return results

    _emit_info('No matches found after filtering')
    _stop_log_listener()
    return None


def _populate_project_owners(gitlab: GitLabAPIClient,
                             project_object: project.Project) -> project.Project:
    """ Populates a given project with either the user who owns it if the namespace kind == user,
    or members of the group who are owners if the namespace kind == group

    Args:
        gitlab: GitLab API object
        project_object: Project to populate the owners of
    Returns:
        Project object with owners populated
    """

    if project_object.namespace.kind == 'group':
        owners = find_group_owners(gitlab.get_group_members(project_object.namespace.id))
        if owners:
            # Store the converted User objects; previously a converted list
            # was built but discarded and the raw dicts were assigned instead
            project_object.namespace.members = [user.create_from_dict(owner) for owner in owners]
            project_object.namespace.owner = None
    elif project_object.namespace.kind == 'user':
        namespace_user = gitlab.get_user_by_username(project_object.namespace.full_path)
        if namespace_user:
            project_object.namespace.owner = user.create_from_dict(namespace_user)
            project_object.namespace.members = None

    return project_object


def _blob_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of blobs to find matches against the regex

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for blob_dict in args.search_result_list:
        try:
            blob_object = blob.create_from_dict(blob_dict)
            project_object = project.create_from_dict(args.gitlab_client.get_project(blob_object.project_id))
            file_object = file.create_from_dict(
                args.gitlab_client.get_file(blob_object.project_id, blob_object.path, blob_object.ref))
            if not file_object:
                continue
            commit_object = commit.create_from_dict(
                args.gitlab_client.get_commit(blob_object.project_id, file_object.commit_id))
            match = args.regex.search(str(blob_object.data))
            if match and convert_to_epoch(commit_object.committed_date) > (now - args.timeframe):
                match_string = match.group(0)
                if not args.verbose:
                    # Drop the (potentially large) blob contents unless verbose output is wanted
                    blob_object.data = None
                # Hash of match + path: stable ID used later for deduplication
                watchman_id = hashlib.md5(f'{match_string}.{file_object.file_path}'.encode()).hexdigest()
                args.results_list.append({
                    'match_string': match_string,
                    'blob': blob_object,
                    'commit': commit_object,
                    'project': _populate_project_owners(args.gitlab_client, project_object),
                    'file': file_object,
                    'watchman_id': watchman_id
                })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _wiki_blob_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of wiki_blobs to find matches against the regex.

    NOTE(review): unlike the other workers, wiki blobs are not filtered by
    timeframe - confirm this is intentional.

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    for wb_dict in args.search_result_list:
        try:
            wikiblob_object = wiki_blob.create_from_dict(wb_dict)
            project_object = None
            group_object = None
            # A wiki blob belongs to either a project wiki or a group wiki
            if wb_dict.get('project_id'):
                project_object = project.create_from_dict(args.gitlab_client.get_project(wb_dict.get('project_id')))
            if wb_dict.get('group_id'):
                group_object = group.create_from_dict(args.gitlab_client.get_group(wb_dict.get('group_id')))

            match = args.regex.search(str(wikiblob_object.data))
            if match:
                match_string = match.group(0)
                if not args.verbose:
                    wikiblob_object.data = None
                watchman_id = hashlib.md5(f'{match_string}.{wikiblob_object.path}'.encode()).hexdigest()
                results_dict = {
                    'match_string': match_string,
                    'wiki_blob': wikiblob_object,
                    'group_wiki': group_object is not None,
                    'project_wiki': project_object is not None,
                    'watchman_id': watchman_id
                }
                if project_object is not None:
                    results_dict['project'] = _populate_project_owners(args.gitlab_client, project_object)
                if group_object is not None:
                    results_dict['group'] = group_object
                args.results_list.append(results_dict)
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _commit_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of commits to find matches against the regex

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for commit_dict in args.search_result_list:
        try:
            commit_object = commit.create_from_dict(commit_dict)
            match = args.regex.search(str(commit_object.message))
            if match and convert_to_epoch(commit_object.committed_date) > (now - args.timeframe):
                project_object = project.create_from_dict(args.gitlab_client.get_project(commit_object.project_id))
                match_string = match.group(0)
                watchman_id = hashlib.md5(f'{match_string}.{commit_object.id}'.encode()).hexdigest()
                args.results_list.append({
                    'match_string': match_string,
                    'commit': commit_object,
                    'project': _populate_project_owners(args.gitlab_client, project_object),
                    'watchman_id': watchman_id
                })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _issue_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of issues to find matches against the regex

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for issue_dict in args.search_result_list:
        try:
            issue_object = issue.create_from_dict(issue_dict)
            match = args.regex.search(str(issue_object.description))
            if match and convert_to_epoch(issue_object.updated_at) > (now - args.timeframe):
                match_string = match.group(0)
                if not args.verbose:
                    issue_object.description = None
                project_object = project.create_from_dict(args.gitlab_client.get_project(issue_object.project_id))
                watchman_id = hashlib.md5(f'{match_string}.{issue_object.id}'.encode()).hexdigest()
                args.results_list.append({
                    'match_string': match_string,
                    'issue': issue_object,
                    'project': _populate_project_owners(args.gitlab_client, project_object),
                    'watchman_id': watchman_id
                })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _milestone_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of milestones to find matches against the regex

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for milestone_dict in args.search_result_list:
        try:
            milestone_object = milestone.create_from_dict(milestone_dict)
            match = args.regex.search(str(milestone_object.description))
            if match and convert_to_epoch(milestone_object.updated_at) > (now - args.timeframe):
                project_object = project.create_from_dict(args.gitlab_client.get_project(milestone_object.project_id))
                match_string = match.group(0)
                if not args.verbose:
                    milestone_object.description = None
                watchman_id = hashlib.md5(f'{match_string}.{milestone_object.id}'.encode()).hexdigest()
                args.results_list.append({
                    'match_string': match_string,
                    'milestone': milestone_object,
                    'project': _populate_project_owners(args.gitlab_client, project_object),
                    'watchman_id': watchman_id
                })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _merge_request_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of merge requests to find matches against the regex

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for mr_dict in args.search_result_list:
        try:
            mr_object = merge_request.create_from_dict(mr_dict)
            match = args.regex.search(str(mr_object.description))
            if match and convert_to_epoch(mr_object.updated_at) > (now - args.timeframe):
                project_object = project.create_from_dict(args.gitlab_client.get_project(mr_object.project_id))
                match_string = match.group(0)
                if not args.verbose:
                    mr_object.description = None
                watchman_id = hashlib.md5(f'{match_string}.{mr_object.id}'.encode()).hexdigest()
                args.results_list.append({
                    'match_string': match_string,
                    'merge_request': mr_object,
                    'project': _populate_project_owners(args.gitlab_client, project_object),
                    'watchman_id': watchman_id
                })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _note_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of notes to find matches against the regex

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for note_dict in args.search_result_list:
        # try/except is per-item (matching the other workers) so one
        # malformed note no longer aborts the rest of the chunk
        try:
            note_object = note.create_from_dict(note_dict)
            match = args.regex.search(str(note_object.body))
            if match and convert_to_epoch(note_object.created_at) > (now - args.timeframe):
                match_string = match.group(0)
                watchman_id = hashlib.md5(f'{match_string}.{note_object.id}'.encode()).hexdigest()
                args.results_list.append({
                    'note': note_object,
                    'match_string': match_string,
                    'watchman_id': watchman_id
                })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list


def _snippet_worker(args: WorkerArgs) -> List[Dict]:
    """ MULTIPROCESSING WORKER - Iterates through a list of snippets to find matches against the regex

    Matches are looked for in the snippet title first, then in its description.

    Args:
        args: Multiprocessing arguments containing the
            GitLab client, search list, regex pattern,
            timeframe, results list, verbosity flag, and log handler.
    Returns:
        Multiprocessing list to be combined by the parent process
    """

    now = calendar.timegm(time.gmtime())
    for snippet_dict in args.search_result_list:
        try:
            snippet_object = snippet.create_from_dict(snippet_dict)
            if convert_to_epoch(snippet_object.created_at) > (now - args.timeframe):
                # Prefer a title match; fall back to the description
                match = (args.regex.search(str(snippet_object.title)) or
                         args.regex.search(str(snippet_object.description)))
                if match:
                    match_string = match.group(0)
                    if not args.verbose:
                        snippet_object.description = None
                    watchman_id = hashlib.md5(f'{match_string}.{snippet_object.id}'.encode()).hexdigest()
                    args.results_list.append({
                        'snippet': snippet_object,
                        'match_string': match_string,
                        'watchman_id': watchman_id
                    })
        except Exception as e:
            _log_worker_exception(args, e)
    return args.results_list
"committed_date": "2021-09-20T11:50:22.001+00:00", + "created_at": "2021-09-20T11:50:22.001+00:00", + "message": "Replace sanitize with escape once", + "parent_ids": ["6104942438c14ec7bd21c6cd5bd995272b3faff6"], + "web_url": "https://gitlab.example.com/janedoe/gitlab-foss/-/commit/" + "ed899a2f4b50b4370feeea94676502b42383c746", + "trailers": {}, + "extended_trailers": {} + } + + MOCK_BLOB_DICT = { + "basename": "README", + "data": "```\n\n## Installation\n\nQuick start using the [pre-built", + "path": "README.md", + "filename": "README.md", + "id": None, + "ref": "main", + "startline": 46, + "project_id": 6 + } + + MOCK_FILE_DICT = { + "file_name": "key.rb", + "file_path": "app/models/key.rb", + "size": 1476, + "encoding": "base64", + "content": "IyA9PSBTY2hlbWEgSW5mb3...", + "content_sha256": "4c294617b60715c1d218e61164a3abd4808a4284cbc30e6728a01ad9aada4481", + "ref": "main", + "blob_id": "79f7bbd25901e8334750839545a9bd021f0e4c83", + "commit_id": "d5a3ff139356ce33e37e73add446f16869741b50", + "last_commit_id": "570e7b2abdd848b95f2f578043fc23bd6f6fd24d", + "execute_filemode": False + } + + MOCK_GROUP_DICT = { + "id": 4, + "name": "Twitter", + "path": "twitter", + "description": "Aliquid qui quis dignissimos distinctio ut commodi voluptas est.", + "visibility": "public", + "avatar_url": None, + "web_url": "https://gitlab.example.com/groups/twitter", + "request_access_enabled": False, + "repository_storage": "default", + "full_name": "Twitter", + "full_path": "twitter", + "runners_token": "ba324ca7b1c77fc20bb9", + "file_template_project_id": 1, + "parent_id": None, + "enabled_git_access_protocol": "all", + "created_at": "2020-01-15T12:36:29.590Z", + "shared_with_groups": [ + { + "group_id": 28, + "group_name": "H5bp", + "group_full_path": "h5bp", + "group_access_level": 20, + "expires_at": None + } + ], + "prevent_sharing_groups_outside_hierarchy": False, + "ip_restriction_ranges": None, + "math_rendering_limits_enabled": None, + "lock_math_rendering_limits_enabled": 
None + } + + MOCK_ISSUE_DICT = { + "id": 83, + "iid": 1, + "project_id": 12, + "title": "Add file", + "description": "Add first file", + "state": "opened", + "created_at": "2018-01-24T06:02:15.514Z", + "updated_at": "2018-02-06T12:36:23.263Z", + "closed_at": '2018-02-06T12:36:23.263Z', + "closed_by": { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "description_html": None, + "description_text": "Add first file", + "labels": [], + "milestone": None, + "assignees": [{ + "id": 20, + "name": "Ceola Deckow", + "username": "sammy.collier", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/c23d85a4f50e0ea76ab739156c639231?s=80&d=identicon", + "web_url": "http://localhost:3000/sammy.collier" + }], + "author": { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "assignee": { + "id": 20, + "name": "Ceola Deckow", + "username": "sammy.collier", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/c23d85a4f50e0ea76ab739156c639231?s=80&d=identicon", + "web_url": "http://localhost:3000/sammy.collier" + }, + "user_notes_count": 0, + "upvotes": 0, + "downvotes": 0, + "due_date": None, + "confidential": False, + "discussion_locked": None, + "web_url": "http://localhost:3000/h5bp/7bp/subgroup-prj/issues/1", + "time_stats": { + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": None, + "human_total_time_spent": None + } + } + + MOCK_MERGE_REQUEST_DICT = { + "id": 56, + "iid": 8, + "project_id": 6, + "title": "Add first file", + "description": "This is a test MR to add file", + "state": "opened", + "created_at": "2018-01-22T14:21:50.830Z", + "updated_at": 
"2018-02-06T12:40:33.295Z", + "target_branch": "main", + "source_branch": "jaja-test", + "upvotes": 0, + "downvotes": 0, + "author": { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + }, + "assignee": { + "id": 5, + "name": "Jacquelyn Kutch", + "username": "abigail", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/3138c66095ee4bd11a508c2f7f7772da?s=80&d=identicon", + "web_url": "http://localhost:3000/abigail" + }, + "source_project_id": 6, + "target_project_id": 6, + "labels": [ + "ruby", + "tests" + ], + "draft": False, + "work_in_progress": False, + "milestone": { + "id": 13, + "iid": 3, + "project_id": 6, + "title": "v2.0", + "description": "Qui aut qui eos dolor beatae itaque tempore molestiae.", + "state": "active", + "created_at": "2017-09-05T07:58:29.099Z", + "updated_at": "2017-09-05T07:58:29.099Z", + "due_date": None, + "start_date": None + }, + "merge_when_pipeline_succeeds": False, + "merge_status": "can_be_merged", + "sha": "78765a2d5e0a43585945c58e61ba2f822e4d090b", + "merge_commit_sha": None, + "squash_commit_sha": None, + "user_notes_count": 0, + "discussion_locked": None, + "should_remove_source_branch": None, + "force_remove_source_branch": True, + "web_url": "http://localhost:3000/twitter/flight/merge_requests/8", + "time_stats": { + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": None, + "human_total_time_spent": None + } + } + + MOCK_MILESTONE_DICT = { + "id": 44, + "iid": 1, + "project_id": 12, + "title": "next release", + "description": "Next release milestone", + "state": "active", + "created_at": "2018-02-06T12:43:39.271Z", + "updated_at": "2018-02-06T12:44:01.298Z", + "due_date": "2018-04-18", + "start_date": "2018-02-04" + } + + MOCK_NOTE_DICT = { + "id": 191, + "body": "Harum maxime consequuntur et et deleniti assumenda 
facilis.", + "attachment": None, + "author": { + "id": 23, + "name": "User 1", + "username": "user1", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/111d68d06e2d317b5a59c2c6c5bad808?s=80&d=identicon", + "web_url": "http://localhost:3000/user1" + }, + "created_at": "2017-09-05T08:01:32.068Z", + "updated_at": "2017-09-05T08:01:32.068Z", + "system": None, + "noteable_id": 22, + "noteable_type": "Issue", + "project_id": 6, + "noteable_iid": 2 + } + + MOCK_PROJECT_DICT = { + "id": 3, + "description": "Lorem ipsum dolor sit amet, consectetur adipiscing elit.", + "description_html": "

Lorem ipsum dolor sit amet, consectetur adipiscing elit.

", + "default_branch": "main", + "visibility": "private", + "ssh_url_to_repo": "git@example.com:diaspora/diaspora-project-site.git", + "http_url_to_repo": "http://example.com/diaspora/diaspora-project-site.git", + "web_url": "http://example.com/diaspora/diaspora-project-site", + "readme_url": "http://example.com/diaspora/diaspora-project-site/blob/main/README.md", + "tag_list": [ + "example", + "disapora project" + ], + "topics": [ + "example", + "disapora project" + ], + "owner": { + "id": 3, + "name": "Diaspora", + "created_at": "2013-09-30T13:46:02Z" + }, + "name": "Diaspora Project Site", + "name_with_namespace": "Diaspora / Diaspora Project Site", + "path": "diaspora-project-site", + "path_with_namespace": "diaspora/diaspora-project-site", + "issues_enabled": True, + "open_issues_count": 1, + "merge_requests_enabled": True, + "jobs_enabled": True, + "wiki_enabled": True, + "snippets_enabled": False, + "can_create_merge_request_in": True, + "resolve_outdated_diff_discussions": False, + "container_registry_enabled": False, + "container_registry_access_level": "disabled", + "security_and_compliance_access_level": "disabled", + "container_expiration_policy": { + "cadence": "7d", + "enabled": False, + "keep_n": None, + "older_than": None, + "name_regex": None, + "name_regex_delete": None, + "name_regex_keep": None, + "next_run_at": "2020-01-07T21:42:58.658Z" + }, + "created_at": "2013-09-30T13:46:02Z", + "updated_at": "2013-09-30T13:46:02Z", + "last_activity_at": "2013-09-30T13:46:02Z", + "creator_id": 3, + "namespace": { + "id": 3, + "name": "Diaspora", + "path": "diaspora", + "kind": "group", + "full_path": "diaspora", + "avatar_url": "http://localhost:3000/uploads/group/avatar/3/foo.jpg", + "web_url": "http://localhost:3000/groups/diaspora" + }, + "import_url": None, + "import_type": None, + "import_status": "none", + "import_error": None, + "permissions": { + "project_access": { + "access_level": 10, + "notification_level": 3 + }, + "group_access": { + 
"access_level": 50, + "notification_level": 3 + } + }, + "archived": False, + "avatar_url": "http://example.com/uploads/project/avatar/3/uploads/avatar.png", + "license_url": "http://example.com/diaspora/diaspora-client/blob/main/LICENSE", + "license": { + "key": "lgpl-3.0", + "name": "GNU Lesser General Public License v3.0", + "nickname": "GNU LGPLv3", + "html_url": "http://choosealicense.com/licenses/lgpl-3.0/", + "source_url": "http://www.gnu.org/licenses/lgpl-3.0.txt" + }, + "shared_runners_enabled": True, + "group_runners_enabled": True, + "forks_count": 0, + "star_count": 0, + "runners_token": "b8bc4a7a29eb76ea83cf79e4908c2b", + "ci_default_git_depth": 50, + "ci_forward_deployment_enabled": True, + "ci_forward_deployment_rollback_allowed": True, + "ci_allow_fork_pipelines_to_run_in_parent_project": True, + "ci_separated_caches": True, + "ci_restrict_pipeline_cancellation_role": "developer", + "ci_pipeline_variables_minimum_override_role": "maintainer", + "ci_push_repository_for_job_token_allowed": False, + "public_jobs": True, + "shared_with_groups": [ + { + "group_id": 4, + "group_name": "Twitter", + "group_full_path": "twitter", + "group_access_level": 30 + }, + { + "group_id": 3, + "group_name": "Gitlab Org", + "group_full_path": "gitlab-org", + "group_access_level": 10 + } + ], + "repository_storage": "default", + "only_allow_merge_if_pipeline_succeeds": False, + "allow_merge_on_skipped_pipeline": False, + "allow_pipeline_trigger_approve_deployment": False, + "restrict_user_defined_variables": False, + "only_allow_merge_if_all_discussions_are_resolved": False, + "remove_source_branch_after_merge": False, + "printing_merge_requests_link_enabled": True, + "request_access_enabled": False, + "merge_method": "merge", + "squash_option": "default_on", + "auto_devops_enabled": True, + "auto_devops_deploy_strategy": "continuous", + "approvals_before_merge": 0, + "mirror": False, + "mirror_user_id": 45, + "mirror_trigger_builds": False, + 
"only_mirror_protected_branches": False, + "mirror_overwrites_diverged_branches": False, + "external_authorization_classification_label": None, + "packages_enabled": True, + "service_desk_enabled": False, + "service_desk_address": None, + "autoclose_referenced_issues": True, + "suggestion_commit_message": None, + "enforce_auth_checks_on_uploads": True, + "merge_commit_template": None, + "squash_commit_template": None, + "issue_branch_template": "gitlab/%{id}-%{title}", + "marked_for_deletion_at": "2020-04-03", + "marked_for_deletion_on": "2020-04-03", + "compliance_frameworks": ["sox"], + "warn_about_potentially_unwanted_characters": True, + "statistics": { + "commit_count": 37, + "storage_size": 1038090, + "repository_size": 1038090, + "wiki_size": 0, + "lfs_objects_size": 0, + "job_artifacts_size": 0, + "pipeline_artifacts_size": 0, + "packages_size": 0, + "snippets_size": 0, + "uploads_size": 0, + "container_registry_size": 0 + }, + "container_registry_image_prefix": "registry.example.com/diaspora/diaspora-client", + "_links": { + "self": "http://example.com/api/v4/projects", + "issues": "http://example.com/api/v4/projects/1/issues", + "merge_requests": "http://example.com/api/v4/projects/1/merge_requests", + "repo_branches": "http://example.com/api/v4/projects/1/repository_branches", + "labels": "http://example.com/api/v4/projects/1/labels", + "events": "http://example.com/api/v4/projects/1/events", + "members": "http://example.com/api/v4/projects/1/members", + "cluster_agents": "http://example.com/api/v4/projects/1/cluster_agents" + } + } + + MOCK_SNIPPET_DICT = { + "id": 1, + "title": "test", + "file_name": "add.rb", + "description": "Ruby test snippet", + "author": { + "id": 1, + "username": "john_smith", + "email": "john@example.com", + "name": "John Smith", + "state": "active", + "created_at": "2012-05-23T08:00:58Z" + }, + "updated_at": "2012-06-28T10:52:04Z", + "created_at": "2012-06-28T10:52:04Z", + "imported": False, + "imported_from": "none", + 
"project_id": 1, + "web_url": "http://example.com/example/example/snippets/1", + "raw_url": "http://example.com/example/example/snippets/1/raw" + } + + MOCK_USER_DICT = { + "id": 1, + "username": "john_smith", + "name": "John Smith", + "state": "active", + "locked": False, + "avatar_url": "http://localhost:3000/uploads/user/avatar/1/cd8.jpeg", + "web_url": "http://localhost:3000/john_smith" + } + + MOCK_WIKI_BLOB_DICT = { + "basename": "home", + "data": "hello\n\nand bye\n\nend", + "path": "home.md", + "filename": "home.md", + "id": None, + "ref": "main", + "startline": 5, + "project_id": 6, + "group_id": None + } + + MOCK_SIGNATURE_DICT = { + 'name': 'Akamai API Access Tokens', + 'id': 'akamai_api_access_tokens', + 'status': 'enabled', + 'author': 'PaperMtn', + 'date': '2023-12-22', + 'description': 'Detects exposed Akamai API Access tokens', + 'severity': '90', + 'notes': None, + 'references': None, + 'watchman_apps': { + 'gitlab': { + 'scope': [ + 'blobs' + ], + 'search_strings': [ + 'akab-' + ] + } + }, + 'test_cases': { + 'match_cases': [ + 'client_token: akab-rWdcwwASNbe9fcGk-00qwecOueticOXxA' + ], + 'fail_cases': [ + 'host: akab-fakehost.akamaiapis.net' + ] + }, + 'patterns': [ + 'akab-[0-9a-zA-Z]{16}-[0-9a-zA-Z]{16}' + ] + } + + +@pytest.fixture +def mock_commit(): + return commit.create_from_dict(GitLabMockData.MOCK_COMMIT_DICT) + + +@pytest.fixture +def mock_blob(): + return blob.create_from_dict(GitLabMockData.MOCK_BLOB_DICT) + + +@pytest.fixture +def mock_file(): + return file.create_from_dict(GitLabMockData.MOCK_FILE_DICT) + + +@pytest.fixture +def mock_group(): + return group.create_from_dict(GitLabMockData.MOCK_GROUP_DICT) + + +@pytest.fixture +def mock_issue(): + return issue.create_from_dict(GitLabMockData.MOCK_ISSUE_DICT) + + +@pytest.fixture +def mock_merge_request(): + return merge_request.create_from_dict(GitLabMockData.MOCK_MERGE_REQUEST_DICT) + + +@pytest.fixture +def mock_milestone(): + return 
milestone.create_from_dict(GitLabMockData.MOCK_MILESTONE_DICT) + + +@pytest.fixture +def mock_note(): + return note.create_from_dict(GitLabMockData.MOCK_NOTE_DICT) + + +@pytest.fixture +def mock_project(): + return project.create_from_dict(GitLabMockData.MOCK_PROJECT_DICT) + + +@pytest.fixture +def mock_snippet(): + return snippet.create_from_dict(GitLabMockData.MOCK_SNIPPET_DICT) + + +@pytest.fixture +def mock_user(): + return user.create_from_dict(GitLabMockData.MOCK_USER_DICT) + + +@pytest.fixture +def mock_wiki_blob(): + return wiki_blob.create_from_dict(GitLabMockData.MOCK_WIKI_BLOB_DICT) + + +@pytest.fixture +def mock_signature(): + return signature.create_from_dict(GitLabMockData.MOCK_SIGNATURE_DICT) diff --git a/tests/unit/models/test_unit_blob.py b/tests/unit/models/test_unit_blob.py new file mode 100644 index 0000000..9cb41b0 --- /dev/null +++ b/tests/unit/models/test_unit_blob.py @@ -0,0 +1,40 @@ +from gitlab_watchman.models import blob + +from fixtures import ( + GitLabMockData, + mock_blob +) + + +def test_blob_initialisation(mock_blob): + # Test that the Blob object is of the correct type + assert isinstance(mock_blob, blob.Blob) + + # Test that the Blob object has the correct attributes + assert mock_blob.id == GitLabMockData.MOCK_BLOB_DICT.get('id') + assert mock_blob.basename == GitLabMockData.MOCK_BLOB_DICT.get('basename') + assert mock_blob.data == GitLabMockData.MOCK_BLOB_DICT.get('data') + assert mock_blob.path == GitLabMockData.MOCK_BLOB_DICT.get('path') + assert mock_blob.filename == GitLabMockData.MOCK_BLOB_DICT.get('filename') + assert mock_blob.ref == GitLabMockData.MOCK_BLOB_DICT.get('ref') + assert mock_blob.project_id == GitLabMockData.MOCK_BLOB_DICT.get('project_id') + + +def test_blob_missing_fields(): + # Create dict with missing fields + blob_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "basename": "ed899a2f4b5", + } + blob_object = blob.create_from_dict(blob_dict) + # Test that the Blob object is of the correct 
type + assert isinstance(blob_object, blob.Blob) + + # Test that the Blob object has the correct attributes + assert blob_object.id == blob_dict.get('id') + assert blob_object.basename == blob_dict.get('basename') + assert blob_object.data is None + assert blob_object.path is None + assert blob_object.filename is None + assert blob_object.ref is None + assert blob_object.project_id is None diff --git a/tests/unit/models/test_unit_commit.py b/tests/unit/models/test_unit_commit.py new file mode 100644 index 0000000..219f96f --- /dev/null +++ b/tests/unit/models/test_unit_commit.py @@ -0,0 +1,53 @@ +from gitlab_watchman.models import commit +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_commit +) + + +def test_commit_initialisation(mock_commit): + # Test that the Commit object is of the correct type + assert isinstance(mock_commit, commit.Commit) + + # Test that the Commit object has the correct attributes + assert mock_commit.id == GitLabMockData.MOCK_COMMIT_DICT.get('id') + assert mock_commit.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_COMMIT_DICT.get('created_at')) + assert mock_commit.title == GitLabMockData.MOCK_COMMIT_DICT.get('title') + assert mock_commit.message == GitLabMockData.MOCK_COMMIT_DICT.get('message') + assert mock_commit.author_name == GitLabMockData.MOCK_COMMIT_DICT.get('author_name') + assert mock_commit.author_email == GitLabMockData.MOCK_COMMIT_DICT.get('author_email') + assert mock_commit.authored_date == convert_to_utc_datetime(GitLabMockData.MOCK_COMMIT_DICT.get('authored_date')) + assert mock_commit.committer_name == GitLabMockData.MOCK_COMMIT_DICT.get('committer_name') + assert mock_commit.committer_email == GitLabMockData.MOCK_COMMIT_DICT.get('committer_email') + assert mock_commit.committed_date == convert_to_utc_datetime(GitLabMockData.MOCK_COMMIT_DICT.get('committed_date')) + assert mock_commit.web_url ==
GitLabMockData.MOCK_COMMIT_DICT.get('web_url') + assert mock_commit.status == GitLabMockData.MOCK_COMMIT_DICT.get('status') + assert mock_commit.project_id == GitLabMockData.MOCK_COMMIT_DICT.get('project_id') + + +def test_commit_missing_fields(): + # Create dict with missing fields + commit_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "short_id": "ed899a2f4b5", + } + commit_object = commit.create_from_dict(commit_dict) + # Test that the Commit object is of the correct type + assert isinstance(commit_object, commit.Commit) + + # Test that the Commit object has the correct attributes + assert commit_object.id == commit_dict.get('id') + assert commit_object.created_at is None + assert commit_object.title is None + assert commit_object.message is None + assert commit_object.author_name is None + assert commit_object.author_email is None + assert commit_object.authored_date is None + assert commit_object.committer_name is None + assert commit_object.committer_email is None + assert commit_object.committed_date is None + assert commit_object.web_url is None + assert commit_object.status is None + assert commit_object.project_id is None diff --git a/tests/unit/models/test_unit_file.py b/tests/unit/models/test_unit_file.py new file mode 100644 index 0000000..a07de7d --- /dev/null +++ b/tests/unit/models/test_unit_file.py @@ -0,0 +1,40 @@ +from gitlab_watchman.models import file + +from fixtures import ( + GitLabMockData, + mock_file +) + + +def test_file_initialisation(mock_file): + # Test that the File object is of the correct type + assert isinstance(mock_file, file.File) + + # Test that the File object has the correct attributes + assert mock_file.file_name == GitLabMockData.MOCK_FILE_DICT.get('file_name') + assert mock_file.file_path == GitLabMockData.MOCK_FILE_DICT.get('file_path') + assert mock_file.size == GitLabMockData.MOCK_FILE_DICT.get('size') + assert mock_file.encoding == GitLabMockData.MOCK_FILE_DICT.get('encoding') + assert
mock_file.ref == GitLabMockData.MOCK_FILE_DICT.get('ref') + assert mock_file.commit_id == GitLabMockData.MOCK_FILE_DICT.get('commit_id') + assert mock_file.last_commit_id == GitLabMockData.MOCK_FILE_DICT.get('last_commit_id') + + +def test_file_missing_fields(): + # Create dict with missing fields + file_dict = { + "file_name": "my_file.txt", + "size": "10", + } + file_object = file.create_from_dict(file_dict) + # Test that the File object is of the correct type + assert isinstance(file_object, file.File) + + # Test that the File object has the correct attributes + assert file_object.file_name == file_dict.get('file_name') + assert file_object.file_path is None + assert file_object.size == file_dict.get('size') + assert file_object.encoding is None + assert file_object.ref is None + assert file_object.commit_id is None + assert file_object.last_commit_id is None \ No newline at end of file diff --git a/tests/unit/models/test_unit_group.py b/tests/unit/models/test_unit_group.py new file mode 100644 index 0000000..067b496 --- /dev/null +++ b/tests/unit/models/test_unit_group.py @@ -0,0 +1,57 @@ +from gitlab_watchman.models import group +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_group +) + + +def test_group_initialisation(mock_group): + # Test that the Group object is of the correct type + assert isinstance(mock_group, group.Group) + + # Test that the Group object has the correct attributes + assert mock_group.id == GitLabMockData.MOCK_GROUP_DICT.get('id') + assert mock_group.name == GitLabMockData.MOCK_GROUP_DICT.get('name') + assert mock_group.path == GitLabMockData.MOCK_GROUP_DICT.get('path') + assert mock_group.description == GitLabMockData.MOCK_GROUP_DICT.get('description') + assert mock_group.visibility == GitLabMockData.MOCK_GROUP_DICT.get('visibility') + assert mock_group.require_two_factor_authentication == GitLabMockData.MOCK_GROUP_DICT.get('require_two_factor_authentication') + assert 
mock_group.two_factor_grace_period == GitLabMockData.MOCK_GROUP_DICT.get('two_factor_grace_period') + assert mock_group.auto_devops_enabled == GitLabMockData.MOCK_GROUP_DICT.get('auto_devops_enabled') + assert mock_group.emails_disabled == GitLabMockData.MOCK_GROUP_DICT.get('emails_disabled') + assert mock_group.request_access_enabled == GitLabMockData.MOCK_GROUP_DICT.get('request_access_enabled') + assert mock_group.full_name == GitLabMockData.MOCK_GROUP_DICT.get('full_name') + assert mock_group.full_path == GitLabMockData.MOCK_GROUP_DICT.get('full_path') + assert mock_group.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_GROUP_DICT.get('created_at')) + assert mock_group.web_url == GitLabMockData.MOCK_GROUP_DICT.get('web_url') + assert mock_group.ip_restriction_ranges == GitLabMockData.MOCK_GROUP_DICT.get('ip_restriction_ranges') + + +def test_group_missing_fields(): + # Create dict with missing fields + group_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "name": "my_group", + } + group_object = group.create_from_dict(group_dict) + # Test that the Group object is of the correct type + assert isinstance(group_object, group.Group) + + # Test that the Group object has the correct attributes + assert group_object.id == group_dict.get('id') + assert group_object.name == group_dict.get('name') + assert group_object.path is None + assert group_object.description is None + assert group_object.visibility is None + assert group_object.require_two_factor_authentication is None + assert group_object.two_factor_grace_period is None + assert group_object.auto_devops_enabled is None + assert group_object.emails_disabled is None + assert group_object.request_access_enabled is None + assert group_object.full_name is None + assert group_object.full_path is None + assert group_object.created_at is None + assert group_object.web_url is None + assert group_object.ip_restriction_ranges is None \ No newline at end of file diff --git 
a/tests/unit/models/test_unit_issue.py b/tests/unit/models/test_unit_issue.py new file mode 100644 index 0000000..2532553 --- /dev/null +++ b/tests/unit/models/test_unit_issue.py @@ -0,0 +1,70 @@ +from gitlab_watchman.models import issue, user +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_issue +) + + +def test_issue_initialisation(mock_issue): + # Test that the Issue object is of the correct type + assert isinstance(mock_issue, issue.Issue) + + # Test that the Issue object has the correct attributes + assert mock_issue.id == GitLabMockData.MOCK_ISSUE_DICT.get('id') + assert mock_issue.iid == GitLabMockData.MOCK_ISSUE_DICT.get('iid') + assert mock_issue.project_id == GitLabMockData.MOCK_ISSUE_DICT.get('project_id') + assert mock_issue.title == GitLabMockData.MOCK_ISSUE_DICT.get('title') + assert mock_issue.description == GitLabMockData.MOCK_ISSUE_DICT.get('description') + assert mock_issue.state == GitLabMockData.MOCK_ISSUE_DICT.get('state') + assert mock_issue.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_ISSUE_DICT.get('created_at')) + assert mock_issue.updated_at == convert_to_utc_datetime(GitLabMockData.MOCK_ISSUE_DICT.get('updated_at')) + assert mock_issue.closed_by == user.create_from_dict(GitLabMockData.MOCK_ISSUE_DICT.get('closed_by')) + assert mock_issue.closed_at == convert_to_utc_datetime(GitLabMockData.MOCK_ISSUE_DICT.get('closed_at')) + assert mock_issue.type == GitLabMockData.MOCK_ISSUE_DICT.get('type') + assert mock_issue.author == user.create_from_dict( + GitLabMockData.MOCK_ISSUE_DICT.get('author')), GitLabMockData.MOCK_ISSUE_DICT.get('author') + assert mock_issue.confidential == GitLabMockData.MOCK_ISSUE_DICT.get('confidential') + assert mock_issue.web_url == GitLabMockData.MOCK_ISSUE_DICT.get('web_url') + + +def test_issues_missing_fields(): + # Create dict with missing fields + issue_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "iid": "1", + } + 
issue_object = issue.create_from_dict(issue_dict) + # Test that the Issue object is of the correct type + assert isinstance(issue_object, issue.Issue) + + # Test that the Issue object has the correct attributes + assert issue_object.id == issue_dict.get('id') + assert issue_object.iid == issue_dict.get('iid') + assert issue_object.project_id is None + assert issue_object.title is None + assert issue_object.description is None + assert issue_object.state is None + assert issue_object.created_at is None + assert issue_object.updated_at is None + assert issue_object.closed_by is None + assert issue_object.closed_at is None + assert issue_object.type is None + assert issue_object.author is None + assert issue_object.confidential is None + assert issue_object.web_url is None + + +def test_issue_user_initialisation(mock_issue): + # Test creating a user object with the response from the GitLab API + + # Test that the User object is of the correct type + assert isinstance(mock_issue.author, user.User) + + # Test that the User object has the correct attributes + assert mock_issue.author.id == GitLabMockData.MOCK_ISSUE_DICT.get('author').get('id') + assert mock_issue.author.name == GitLabMockData.MOCK_ISSUE_DICT.get('author').get('name') + assert mock_issue.author.username == GitLabMockData.MOCK_ISSUE_DICT.get('author').get('username') + assert mock_issue.author.state == GitLabMockData.MOCK_ISSUE_DICT.get('author').get('state') + assert mock_issue.author.web_url == GitLabMockData.MOCK_ISSUE_DICT.get('author').get('web_url') diff --git a/tests/unit/models/test_unit_merge_request.py b/tests/unit/models/test_unit_merge_request.py new file mode 100644 index 0000000..81eaeb7 --- /dev/null +++ b/tests/unit/models/test_unit_merge_request.py @@ -0,0 +1,88 @@ +from gitlab_watchman.models import merge_request, user +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_merge_request +) + + +def 
test_merge_request_initialisation(mock_merge_request): + # Test that the MergeRequest object is of the correct type + assert isinstance(mock_merge_request, merge_request.MergeRequest) + + # Test that the MergeRequest object has the correct attributes + assert mock_merge_request.id == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('id') + assert mock_merge_request.iid == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('iid') + assert mock_merge_request.project_id == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('project_id') + assert mock_merge_request.title == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('title') + assert mock_merge_request.description == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('description') + assert mock_merge_request.state == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('state') + assert mock_merge_request.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('created_at')) + assert mock_merge_request.updated_at == convert_to_utc_datetime(GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('updated_at')) + assert mock_merge_request.merged_by is None + assert mock_merge_request.merged_at == convert_to_utc_datetime(GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('merged_at')) + assert mock_merge_request.target_branch == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('target_branch') + assert mock_merge_request.source_branch == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('source_branch') + assert isinstance(mock_merge_request.author, user.User) + assert mock_merge_request.source_project_id == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('source_project_id') + assert mock_merge_request.target_project_id == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('target_project_id') + assert mock_merge_request.merge_status == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('merge_status') + assert mock_merge_request.web_url == GitLabMockData.MOCK_MERGE_REQUEST_DICT.get('web_url') + + +def test_merge_request_missing_fields(): + # Create dict with missing fields + 
merge_request_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "iid": "1", + } + merge_request_object = merge_request.create_from_dict(merge_request_dict) + # Test that the MergeRequest object is of the correct type + assert isinstance(merge_request_object, merge_request.MergeRequest) + + # Test that the MergeRequest object has the correct attributes + assert merge_request_object.id == merge_request_dict.get('id') + assert merge_request_object.iid == merge_request_dict.get('iid') + assert merge_request_object.project_id is None + assert merge_request_object.title is None + assert merge_request_object.description is None + assert merge_request_object.state is None + assert merge_request_object.created_at is None + assert merge_request_object.updated_at is None + assert merge_request_object.merged_by is None + assert merge_request_object.merged_at is None + assert merge_request_object.target_branch is None + assert merge_request_object.source_branch is None + assert merge_request_object.author is None + assert merge_request_object.source_project_id is None + assert merge_request_object.target_project_id is None + assert merge_request_object.merge_status is None + assert merge_request_object.web_url is None + + +def test_initialisation_with_merged_by_user(mock_merge_request): + # Create dict and add merged_by user + merge_request_dict = GitLabMockData.MOCK_MERGE_REQUEST_DICT.copy() + merge_request_dict['merged_by'] = { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + } + merge_request_object = merge_request.create_from_dict(merge_request_dict) + + # Test that the MergeRequest object is of the correct type + assert isinstance(merge_request_object, merge_request.MergeRequest) + + # Test that the MergeRequest object has the correct attributes + assert 
isinstance(merge_request_object.merged_by, user.User) + + # Test that the User object is of the correct type + assert merge_request_object.merged_by.id == 1 + assert merge_request_object.merged_by.name == 'Administrator' + assert merge_request_object.merged_by.username == 'root' + assert merge_request_object.merged_by.state == 'active' + assert merge_request_object.merged_by.web_url == 'http://localhost:3000/root' diff --git a/tests/unit/models/test_unit_milestone.py b/tests/unit/models/test_unit_milestone.py new file mode 100644 index 0000000..2993b10 --- /dev/null +++ b/tests/unit/models/test_unit_milestone.py @@ -0,0 +1,51 @@ +from gitlab_watchman.models import milestone +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_milestone +) + + +def test_milestone_initialisation(mock_milestone): + # Test that the Milestone object is of the correct type + assert isinstance(mock_milestone, milestone.Milestone) + + # Test that the Milestone object has the correct attributes + assert mock_milestone.id == GitLabMockData.MOCK_MILESTONE_DICT.get('id') + assert mock_milestone.iid == GitLabMockData.MOCK_MILESTONE_DICT.get('iid') + assert mock_milestone.project_id == GitLabMockData.MOCK_MILESTONE_DICT.get('project_id') + assert mock_milestone.title == GitLabMockData.MOCK_MILESTONE_DICT.get('title') + assert mock_milestone.description == GitLabMockData.MOCK_MILESTONE_DICT.get('description') + assert mock_milestone.state == GitLabMockData.MOCK_MILESTONE_DICT.get('state') + assert mock_milestone.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_MILESTONE_DICT.get('created_at')) + assert mock_milestone.updated_at == convert_to_utc_datetime(GitLabMockData.MOCK_MILESTONE_DICT.get('updated_at')) + assert mock_milestone.due_date == convert_to_utc_datetime(GitLabMockData.MOCK_MILESTONE_DICT.get('due_date')) + assert mock_milestone.start_date == convert_to_utc_datetime(GitLabMockData.MOCK_MILESTONE_DICT.get('start_date')) 
+ assert mock_milestone.expired == GitLabMockData.MOCK_MILESTONE_DICT.get('expired') + assert mock_milestone.web_url == GitLabMockData.MOCK_MILESTONE_DICT.get('web_url') + + +def test_milestone_missing_fields(): + # Create dict with missing fields + milestone_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "iid": "1", + } + milestone_object = milestone.create_from_dict(milestone_dict) + # Test that the Milestone object is of the correct type + assert isinstance(milestone_object, milestone.Milestone) + + # Test that the Milestone object has the correct attributes + assert milestone_object.id == milestone_dict.get('id') + assert milestone_object.iid == milestone_dict.get('iid') + assert milestone_object.project_id is None + assert milestone_object.title is None + assert milestone_object.description is None + assert milestone_object.state is None + assert milestone_object.created_at is None + assert milestone_object.updated_at is None + assert milestone_object.due_date is None + assert milestone_object.start_date is None + assert milestone_object.expired is None + assert milestone_object.web_url is None \ No newline at end of file diff --git a/tests/unit/models/test_unit_note.py b/tests/unit/models/test_unit_note.py new file mode 100644 index 0000000..0a4b498 --- /dev/null +++ b/tests/unit/models/test_unit_note.py @@ -0,0 +1,89 @@ +from gitlab_watchman.models import note, user +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_note +) + + +def test_note_initialisation(mock_note): + # Test that the Note object is of the correct type + assert isinstance(mock_note, note.Note) + + # Test that the Note object has the correct attributes + assert mock_note.id == GitLabMockData.MOCK_NOTE_DICT.get('id') + assert mock_note.type == GitLabMockData.MOCK_NOTE_DICT.get('type') + assert mock_note.body == GitLabMockData.MOCK_NOTE_DICT.get('body') + assert mock_note.attachment == 
GitLabMockData.MOCK_NOTE_DICT.get('attachment') + assert isinstance(mock_note.author, user.User) + assert mock_note.author.id == GitLabMockData.MOCK_NOTE_DICT.get('author').get('id') + assert mock_note.author.name == GitLabMockData.MOCK_NOTE_DICT.get('author').get('name') + assert mock_note.author.username == GitLabMockData.MOCK_NOTE_DICT.get('author').get('username') + assert mock_note.author.state == GitLabMockData.MOCK_NOTE_DICT.get('author').get('state') + assert mock_note.author.web_url == GitLabMockData.MOCK_NOTE_DICT.get('author').get('web_url') + assert mock_note.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_NOTE_DICT.get('created_at')) + assert mock_note.updated_at == convert_to_utc_datetime(GitLabMockData.MOCK_NOTE_DICT.get('updated_at')) + assert mock_note.system == GitLabMockData.MOCK_NOTE_DICT.get('system') + assert mock_note.noteable_id == GitLabMockData.MOCK_NOTE_DICT.get('noteable_id') + assert mock_note.noteable_type == GitLabMockData.MOCK_NOTE_DICT.get('noteable_type') + assert mock_note.commit_id == GitLabMockData.MOCK_NOTE_DICT.get('commit_id') + assert mock_note.resolvable == GitLabMockData.MOCK_NOTE_DICT.get('resolvable') + assert mock_note.resolved_by is None + assert mock_note.resolved_at == convert_to_utc_datetime(GitLabMockData.MOCK_NOTE_DICT.get('resolved_at')) + assert mock_note.confidential == GitLabMockData.MOCK_NOTE_DICT.get('confidential') + assert mock_note.noteable_iid == GitLabMockData.MOCK_NOTE_DICT.get('noteable_iid') + assert mock_note.command_changes == GitLabMockData.MOCK_NOTE_DICT.get('command_changes') + + +def test_note_missing_fields(): + # Create dict with missing fields + note_dict = { + "id": 1, + "type": "note", + } + note_object = note.create_from_dict(note_dict) + # Test that the Note object is of the correct type + assert isinstance(note_object, note.Note) + + # Test that the Note object has the correct attributes + assert note_object.id == note_dict.get('id') + assert note_object.type ==
note_dict.get('type') + assert note_object.body is None + assert note_object.attachment is None + assert note_object.author is None + assert note_object.created_at is None + assert note_object.updated_at is None + assert note_object.system is None + assert note_object.noteable_id is None + assert note_object.noteable_type is None + assert note_object.commit_id is None + assert note_object.resolvable is None + assert note_object.resolved_by is None + assert note_object.resolved_at is None + assert note_object.confidential is None + assert note_object.noteable_iid is None + assert note_object.command_changes is None + + +def test_note_with_resolved_by(): + # Test that the Note object is of the correct type + note_dict = GitLabMockData.MOCK_NOTE_DICT.copy() + note_dict['resolved_by'] = { + "id": 1, + "name": "Administrator", + "username": "root", + "state": "active", + "avatar_url": "https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon", + "web_url": "http://localhost:3000/root" + } + note_object = note.create_from_dict(note_dict) + + # Test that the Note object has the correct attributes + assert isinstance(note_object, note.Note) + assert isinstance(note_object.resolved_by, user.User) + assert note_object.resolved_by.id == note_dict.get('resolved_by').get('id') + assert note_object.resolved_by.name == note_dict.get('resolved_by').get('name') + assert note_object.resolved_by.username == note_dict.get('resolved_by').get('username') + assert note_object.resolved_by.state == note_dict.get('resolved_by').get('state') + assert note_object.resolved_by.web_url == note_dict.get('resolved_by').get('web_url') \ No newline at end of file diff --git a/tests/unit/models/test_unit_project.py b/tests/unit/models/test_unit_project.py new file mode 100644 index 0000000..8ab63c5 --- /dev/null +++ b/tests/unit/models/test_unit_project.py @@ -0,0 +1,69 @@ +from gitlab_watchman.models import project +from gitlab_watchman.utils import convert_to_utc_datetime + 
+from fixtures import ( + GitLabMockData, + mock_project +) + + +def test_project_initialisation(mock_project): + # Test that the Project object is of the correct type + assert isinstance(mock_project, project.Project) + + # Test that the Project object has the correct attributes + assert mock_project.id == GitLabMockData.MOCK_PROJECT_DICT.get('id') + assert mock_project.description == GitLabMockData.MOCK_PROJECT_DICT.get('description') + assert mock_project.name == GitLabMockData.MOCK_PROJECT_DICT.get('name') + assert mock_project.name_with_namespace == GitLabMockData.MOCK_PROJECT_DICT.get('name_with_namespace') + assert mock_project.path == GitLabMockData.MOCK_PROJECT_DICT.get('path') + assert mock_project.path_with_namespace == GitLabMockData.MOCK_PROJECT_DICT.get('path_with_namespace') + assert mock_project.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_PROJECT_DICT.get('created_at')) + assert mock_project.web_url == GitLabMockData.MOCK_PROJECT_DICT.get('web_url') + assert mock_project.last_activity_at == convert_to_utc_datetime(GitLabMockData.MOCK_PROJECT_DICT.get('last_activity_at')) + + # Test that the Namespace object is of the correct type + assert isinstance(mock_project.namespace, project.Namespace) + + # Test that the Namespace object has the correct attributes + assert mock_project.namespace.id == GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('id') + assert mock_project.namespace.name == GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('name') + assert mock_project.namespace.path == GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('path') + assert mock_project.namespace.web_url == GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('web_url') + assert mock_project.namespace.kind == GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('kind') + assert mock_project.namespace.full_path == GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('full_path') + assert mock_project.namespace.parent_id == 
GitLabMockData.MOCK_PROJECT_DICT.get('namespace').get('parent_id') + + +def test_project_missing_fields(): + # Create a dict with missing fields + project_dict = { + "id": "1", + "description": "Test project", + } + project_object = project.create_from_dict(project_dict) + # Test that the Project object is of the correct type + assert isinstance(project_object, project.Project) + + # Test that the Project object has the correct attributes + assert project_object.id == project_dict.get('id') + assert project_object.description == project_dict.get('description') + assert project_object.name is None + assert project_object.name_with_namespace is None + assert project_object.path is None + assert project_object.path_with_namespace is None + assert project_object.created_at is None + assert project_object.web_url is None + assert project_object.last_activity_at is None + + # Test that the Namespace object is of the correct type + assert isinstance(project_object.namespace, project.Namespace) + + # Test that the Namespace object has the correct attributes + assert project_object.namespace.id is None + assert project_object.namespace.name is None + assert project_object.namespace.path is None + assert project_object.namespace.web_url is None + assert project_object.namespace.kind is None + assert project_object.namespace.full_path is None + assert project_object.namespace.parent_id is None diff --git a/tests/unit/models/test_unit_signature.py b/tests/unit/models/test_unit_signature.py new file mode 100644 index 0000000..8cc9614 --- /dev/null +++ b/tests/unit/models/test_unit_signature.py @@ -0,0 +1,106 @@ +import pytest +import copy + +from gitlab_watchman.models import signature +from fixtures import GitLabMockData, mock_signature + + +def test_signature_initialisation(mock_signature): + # Test that the signature object is initialised + assert isinstance(mock_signature, signature.Signature) + + # Test that the signature object has the correct attributes + assert 
mock_signature.name == GitLabMockData.MOCK_SIGNATURE_DICT.get('name') + assert mock_signature.id == GitLabMockData.MOCK_SIGNATURE_DICT.get('id') + assert mock_signature.status == GitLabMockData.MOCK_SIGNATURE_DICT.get('status') + assert mock_signature.author == GitLabMockData.MOCK_SIGNATURE_DICT.get('author') + assert mock_signature.date == GitLabMockData.MOCK_SIGNATURE_DICT.get('date') + assert mock_signature.description == GitLabMockData.MOCK_SIGNATURE_DICT.get('description') + assert mock_signature.severity == GitLabMockData.MOCK_SIGNATURE_DICT.get('severity') + assert mock_signature.watchman_apps == GitLabMockData.MOCK_SIGNATURE_DICT.get('watchman_apps') + assert mock_signature.scope == GitLabMockData.MOCK_SIGNATURE_DICT.get('watchman_apps').get('gitlab').get('scope') + + +def test_field_type(): + # Test that correct error is raised when name is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['name'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when id is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['id'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when status is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['status'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when author is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['author'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when date is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + 
signature_dict['date'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when severity is not a string or int + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['severity'] = 5.0 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when watchman_apps is not a dict + signature_dict_temp = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict_temp['watchman_apps'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when scope is not a list + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['watchman_apps']['gitlab']['scope'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when search_strings is not a list + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['watchman_apps']['gitlab']['search_strings'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when patterns is not a list + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['patterns'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when version is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['version'] = 123 + with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + # Test that correct error is raised when description is not a string + signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + signature_dict['description'] = 123 + 
with pytest.raises(TypeError): + test_signature = signature.create_from_dict(signature_dict) + + +def test_missing_field(): + temp_signature_dict = copy.deepcopy(GitLabMockData.MOCK_SIGNATURE_DICT) + del temp_signature_dict['name'] + test_signature = signature.create_from_dict(temp_signature_dict) + assert test_signature.name is None + + del temp_signature_dict['watchman_apps'] + test_signature = signature.create_from_dict(temp_signature_dict) + assert test_signature.watchman_apps is None diff --git a/tests/unit/models/test_unit_snippet.py b/tests/unit/models/test_unit_snippet.py new file mode 100644 index 0000000..196e324 --- /dev/null +++ b/tests/unit/models/test_unit_snippet.py @@ -0,0 +1,94 @@ +from gitlab_watchman.models import snippet, user +from gitlab_watchman.utils import convert_to_utc_datetime + +from fixtures import ( + GitLabMockData, + mock_snippet +) + + +def test_snippet_initialisation(mock_snippet): + # Test that the Snippet object is of the correct type + assert isinstance(mock_snippet, snippet.Snippet) + + # Test that the Snippet object has the correct attributes + assert mock_snippet.id == GitLabMockData.MOCK_SNIPPET_DICT.get('id') + assert mock_snippet.title == GitLabMockData.MOCK_SNIPPET_DICT.get('title') + assert mock_snippet.description == GitLabMockData.MOCK_SNIPPET_DICT.get('description') + assert mock_snippet.visibility == GitLabMockData.MOCK_SNIPPET_DICT.get('visibility') + assert mock_snippet.created_at == convert_to_utc_datetime(GitLabMockData.MOCK_SNIPPET_DICT.get('created_at')) + assert mock_snippet.updated_at == convert_to_utc_datetime(GitLabMockData.MOCK_SNIPPET_DICT.get('updated_at')) + assert mock_snippet.file_name == GitLabMockData.MOCK_SNIPPET_DICT.get('file_name') + assert mock_snippet.web_url == GitLabMockData.MOCK_SNIPPET_DICT.get('web_url') + + assert mock_snippet.files is None + assert isinstance(mock_snippet.author, user.User) + + assert mock_snippet.author.id == GitLabMockData.MOCK_SNIPPET_DICT.get('author').get('id') + 
assert mock_snippet.author.name == GitLabMockData.MOCK_SNIPPET_DICT.get('author').get('name') + assert mock_snippet.author.username == GitLabMockData.MOCK_SNIPPET_DICT.get('author').get('username') + assert mock_snippet.author.state == GitLabMockData.MOCK_SNIPPET_DICT.get('author').get('state') + assert mock_snippet.author.web_url == GitLabMockData.MOCK_SNIPPET_DICT.get('author').get('web_url') + + +def test_snippet_missing_fields(): + # Create a dict with missing fields + snippet_dict = { + "id": "1", + "title": "Test snippet", + } + snippet_object = snippet.create_from_dict(snippet_dict) + # Test that the Snippet object is of the correct type + assert isinstance(snippet_object, snippet.Snippet) + + # Test that the Snippet object has the correct attributes + assert snippet_object.id == snippet_dict.get('id') + assert snippet_object.title == snippet_dict.get('title') + assert snippet_object.description is None + assert snippet_object.visibility is None + assert snippet_object.created_at is None + assert snippet_object.updated_at is None + assert snippet_object.file_name is None + assert snippet_object.web_url is None + + assert snippet_object.author is None + assert snippet_object.files is None + + +def test_snippet_file_initialisation(): + # Test creating with one file + snippet_dict_one = GitLabMockData.MOCK_SNIPPET_DICT.copy() + snippet_dict_one['files'] = [ + { + 'path': 'README.md', + 'raw_url': 'https://gitlab.com/test/test/-/blob/master/README.md' + } + ] + snippet_object_one = snippet.create_from_dict(snippet_dict_one) + assert isinstance(snippet_object_one.files, list) + assert len(snippet_object_one.files) == 1 + assert isinstance(snippet_object_one.files[0], snippet.File) + assert snippet_object_one.files[0].path == 'README.md' + assert snippet_object_one.files[0].raw_url == 'https://gitlab.com/test/test/-/blob/master/README.md' + + # Test creating with multiple files + snippet_dict_two = GitLabMockData.MOCK_SNIPPET_DICT.copy() + 
snippet_dict_two['files'] = [ + { + 'path': 'README.md', + 'raw_url': 'https://gitlab.com/test/test/-/blob/master/README.md' + }, + { + 'path': 'LICENSE', + 'raw_url': 'https://gitlab.com/test/test/-/blob/master/LICENSE' + } + ] + snippet_object_two = snippet.create_from_dict(snippet_dict_two) + assert isinstance(snippet_object_two.files, list) + assert len(snippet_object_two.files) == 2 + assert isinstance(snippet_object_two.files[0], snippet.File) + assert isinstance(snippet_object_two.files[1], snippet.File) + assert snippet_object_two.files[0].path == 'README.md' + assert snippet_object_two.files[0].raw_url == 'https://gitlab.com/test/test/-/blob/master/README.md' + assert snippet_object_two.files[1].path == 'LICENSE' + assert snippet_object_two.files[1].raw_url == 'https://gitlab.com/test/test/-/blob/master/LICENSE' diff --git a/tests/unit/models/test_unit_user.py b/tests/unit/models/test_unit_user.py new file mode 100644 index 0000000..3f4d6d0 --- /dev/null +++ b/tests/unit/models/test_unit_user.py @@ -0,0 +1,36 @@ +from gitlab_watchman.models import user + +from fixtures import ( + GitLabMockData, + mock_user +) + + +def test_user_initialisation(mock_user): + # Test that the User object is of the correct type + assert isinstance(mock_user, user.User) + + # Test that the User object has the correct attributes + assert mock_user.id == GitLabMockData.MOCK_USER_DICT.get('id') + assert mock_user.name == GitLabMockData.MOCK_USER_DICT.get('name') + assert mock_user.username == GitLabMockData.MOCK_USER_DICT.get('username') + assert mock_user.state == GitLabMockData.MOCK_USER_DICT.get('state') + assert mock_user.web_url == GitLabMockData.MOCK_USER_DICT.get('web_url') + + +def test_user_missing_fields(): + # Create a user object with missing fields + user_dict = { + "id": "1", + "name": "Test user", + } + + # Test that the User object is of the correct type + assert isinstance(user.create_from_dict(user_dict), user.User) + + # Test that the User object has the correct 
attributes + assert user.create_from_dict(user_dict).id == user_dict.get('id') + assert user.create_from_dict(user_dict).name == user_dict.get('name') + assert user.create_from_dict(user_dict).username is None + assert user.create_from_dict(user_dict).state is None + assert user.create_from_dict(user_dict).web_url is None diff --git a/tests/unit/models/test_unit_wiki_blob.py b/tests/unit/models/test_unit_wiki_blob.py new file mode 100644 index 0000000..d9dc417 --- /dev/null +++ b/tests/unit/models/test_unit_wiki_blob.py @@ -0,0 +1,38 @@ +from gitlab_watchman.models import wiki_blob +from fixtures import GitLabMockData, mock_wiki_blob + + +def test_wiki_blob_initialisation(mock_wiki_blob): + # Test that the WikiBlob object is of the correct type + assert isinstance(mock_wiki_blob, wiki_blob.WikiBlob) + + # Test that the WikiBlob object has the correct attributes + assert mock_wiki_blob.id == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('id') + assert mock_wiki_blob.basename == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('basename') + assert mock_wiki_blob.data == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('data') + assert mock_wiki_blob.path == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('path') + assert mock_wiki_blob.filename == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('filename') + assert mock_wiki_blob.ref == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('ref') + assert mock_wiki_blob.project_id == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('project_id') + assert mock_wiki_blob.group_id == GitLabMockData.MOCK_WIKI_BLOB_DICT.get('group_id') + + +def test_wiki_blob_missing_fields(): + # Create dict with missing fields + wiki_blob_dict = { + "id": "ed899a2f4b50b4370feeea94676502b42383c746", + "basename": "ed899a2f4b5", + } + wiki_blob_object = wiki_blob.create_from_dict(wiki_blob_dict) + # Test that the WikiBlob object is of the correct type + assert isinstance(wiki_blob_object, wiki_blob.WikiBlob) + + # Test that the WikiBlob object has the correct attributes + assert wiki_blob_object.id == 
wiki_blob_dict.get('id') + assert wiki_blob_object.basename == wiki_blob_dict.get('basename') + assert wiki_blob_object.data is None + assert wiki_blob_object.path is None + assert wiki_blob_object.filename is None + assert wiki_blob_object.ref is None + assert wiki_blob_object.project_id is None + assert wiki_blob_object.group_id is None diff --git a/tests/unit/test_unit_utils.py b/tests/unit/test_unit_utils.py new file mode 100644 index 0000000..73c964d --- /dev/null +++ b/tests/unit/test_unit_utils.py @@ -0,0 +1,217 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Dict, Any + +import pytest + +from gitlab_watchman.utils import ( + convert_to_epoch, + convert_to_utc_datetime, + deduplicate_results, + convert_to_dict, + split_to_chunks +) + + +def test_convert_to_epoch_string(): + # Test with a correct ISO 8601 timestamp containing seconds + string_timestamp = '2021-09-20T10:00:00.000+00:00' + expected_output = 1632132000 + assert convert_to_epoch(string_timestamp) == expected_output + + # Test with a correct ISO 8601 timestamp containing milliseconds + string_timestamp = '2021-09-20T10:00:00.123+00:00' + expected_output = 1632132000 + assert convert_to_epoch(string_timestamp) == expected_output + + # Test with a correct ISO 8601 timestamp containing microseconds + string_timestamp = '2021-09-20T10:00:00.123456+00:00' + expected_output = 1632132000 + assert convert_to_epoch(string_timestamp) == expected_output + + # Test with a correct ISO 8601 timestamp with different timezone - +05:00 + string_timestamp = '2021-09-20T15:00:00.000+05:00' + expected_output = 1632132000 + assert convert_to_epoch(string_timestamp) == expected_output + + # Test with a correct ISO 8601 timestamp with different timezone - -05:00 + string_timestamp = '2021-09-20T05:00:00.000-05:00' + expected_output = 1632132000 + assert convert_to_epoch(string_timestamp) == expected_output + + # Test with None input - Should gracefully fail and return None + 
string_timestamp = None + expected_output = None + assert convert_to_epoch(string_timestamp) == expected_output + + +def test_convert_to_epoch_datetime(): + # Test an int is returned when passing a datetime object + assert isinstance(convert_to_epoch(datetime.now()), int) + + # Test with None input - Should gracefully fail and return None + string_timestamp = None + expected_output = None + assert convert_to_epoch(string_timestamp) == expected_output + + # Test correct epoch timestamp is returned for datetime object + string_timestamp = '2021-09-20T10:00:00.000+00:00' + dt_input = datetime.strptime(string_timestamp, '%Y-%m-%dT%H:%M:%S.%f%z') + expected_output = 1632132000 + assert convert_to_epoch(dt_input) == expected_output + + # Test correct epoch timestamp is returned for datetime object with different timezone + string_timestamp = '2021-09-20T15:00:00.000+05:00' + dt_input = datetime.strptime(string_timestamp, '%Y-%m-%dT%H:%M:%S.%f%z') + expected_output = 1632132000 + assert convert_to_epoch(dt_input) == expected_output + + # Test correct epoch timestamp is returned for datetime object with different timezone + string_timestamp = '2021-09-20T05:00:00.000-05:00' + dt_input = datetime.strptime(string_timestamp, '%Y-%m-%dT%H:%M:%S.%f%z') + expected_output = 1632132000 + assert convert_to_epoch(dt_input) == expected_output + + +def test_convert_to_utc_datetime(): + # Test datetime object is returned + assert isinstance(convert_to_utc_datetime('2021-09-20T10:00:00.000+00:00'), datetime) + + # Test with a correct ISO 8601 timestamp containing seconds + string_timestamp = '2021-09-20T10:00:00.000+00:00' + expected_output = '2021-09-20 10:00:00' + assert convert_to_utc_datetime(string_timestamp).strftime('%Y-%m-%d %H:%M:%S') == expected_output + + # Test with a correct ISO 8601 timestamp containing milliseconds + string_timestamp = '2021-09-20T10:00:00.123+00:00' + expected_output = '2021-09-20 10:00:00' + assert 
convert_to_utc_datetime(string_timestamp).strftime('%Y-%m-%d %H:%M:%S') == expected_output + + # Test with a correct ISO 8601 timestamp containing microseconds + string_timestamp = '2021-09-20T10:00:00.123456+00:00' + expected_output = '2021-09-20 10:00:00' + assert convert_to_utc_datetime(string_timestamp).strftime('%Y-%m-%d %H:%M:%S') == expected_output + + # Test with a correct ISO 8601 timestamp with different timezone - +05:00 + string_timestamp = '2021-09-20T15:00:00.000+05:00' + expected_output = '2021-09-20 10:00:00' + assert convert_to_utc_datetime(string_timestamp).strftime('%Y-%m-%d %H:%M:%S') == expected_output + + # Test with a correct ISO 8601 timestamp with different timezone - -05:00 + string_timestamp = '2021-09-20T05:00:00.000-05:00' + expected_output = '2021-09-20 10:00:00' + assert convert_to_utc_datetime(string_timestamp).strftime('%Y-%m-%d %H:%M:%S') == expected_output + + # Test with output string containing timezone + string_timestamp = '2021-09-20T05:00:00.000-05:00' + expected_output = '2021-09-20 10:00:00 UTC' + assert convert_to_utc_datetime(string_timestamp).strftime('%Y-%m-%d %H:%M:%S %Z') == expected_output + + # Test with None input - Should gracefully fail and return None + string_timestamp = None + expected_output = None + assert convert_to_utc_datetime(string_timestamp) == expected_output + + # Test with YYYY-MM-DD input - Should return a naive datetime object for the given date (aware == naive is always False, so the assertion below requires a naive result) + string_timestamp = '2021-09-20' + expected_output = datetime(2021, 9, 20) + assert convert_to_utc_datetime(string_timestamp) == expected_output + + +@dataclass +class TestClass: + __test__ = False + name: str + age: int + + +@pytest.fixture +def simple_example_result() -> Dict[Any, Any]: + return { + "file": { + "created": "2024-01-01 00:00:00 UTC", + "editable": False, + "user": "UABC123" + }, + "user": { + "name": "Joe Bloggs", + "age": 30, + }, + "watchman_id": "abc123" + } + + +@pytest.fixture +def dataclass_example_result_one() -> Dict[Any, 
Any]: + return { + "file": { + "created": "2024-01-01 00:00:00 UTC", + "editable": False, + "user": "UABC123" + }, + "user": TestClass(name='Joe Bloggs', age=30), + "watchman_id": "abc123" + } + + +@pytest.fixture +def dataclass_example_result_two() -> Dict[Any, Any]: + return { + "match_string": "2840631", + "message": { + "created": "2024-01-01 00:00:00 UTC", + "id": "abcdefghijklmnopqrstuvwxyz", + "permalink": "https://example.com", + "text": "This is a message", + "timestamp": "1729257170.452549", + "type": "message", + "user": TestClass(name='John Smith', age=30) + }, + "watchman_id": "abc1234" + } + + +def test_convert_to_dict(simple_example_result: Dict[Any, Any], + dataclass_example_result_one: Dict[Any, Any]) -> None: + # Test with simple example + assert convert_to_dict(simple_example_result) == simple_example_result + + # Test with dataclass example + assert convert_to_dict(dataclass_example_result_one) == simple_example_result + + +def test_deduplicate_results(simple_example_result: Dict[Any, Any], + dataclass_example_result_one: Dict[Any, Any], + dataclass_example_result_two: Dict[Any, Any]) -> None: + # Test with a single result + assert deduplicate_results([simple_example_result]) == [simple_example_result] + + # Test with multiple results containing duplicates + assert deduplicate_results([simple_example_result, simple_example_result]) == [ + simple_example_result] + + # Test with dataclass example + assert deduplicate_results([dataclass_example_result_one]) == [convert_to_dict(dataclass_example_result_one)] + + # Test with multiple dataclass examples with no duplicates + assert deduplicate_results([dataclass_example_result_one, dataclass_example_result_two]) == [ + convert_to_dict(dataclass_example_result_two), convert_to_dict(dataclass_example_result_one)] + + # Test with multiple dataclass examples with duplicates + assert (deduplicate_results([dataclass_example_result_one, dataclass_example_result_one]) == + 
[convert_to_dict(dataclass_example_result_one)]) + + +def test_split_to_chunks(): + # Define test cases + test_cases = [ + ([1, 2, 3, 4, 5, 6, 7], 3, [[1, 4, 7], [2, 5], [3, 6]]), + ([1, 2, 3, 4, 5, 6], 2, [[1, 3, 5], [2, 4, 6]]), + ([1, 2, 3, 4], 4, [[1], [2], [3], [4]]), + ([1, 2, 3], 1, [[1, 2, 3]]), + ([], 3, [[], [], []]) # Edge case: empty input list + ] + + for input_list, no_of_chunks, expected in test_cases: + result = list(split_to_chunks(input_list, no_of_chunks)) + assert result == expected, f"Failed for input_list={input_list}, no_of_chunks={no_of_chunks}" \ No newline at end of file