diff --git a/Tests/scripts/add_pr_comment.py b/.circleci/add_pr_comment.py similarity index 100% rename from Tests/scripts/add_pr_comment.py rename to .circleci/add_pr_comment.py diff --git a/Tests/scripts/circleci_spell_checker.py b/.circleci/circleci_spell_checker.py similarity index 97% rename from Tests/scripts/circleci_spell_checker.py rename to .circleci/circleci_spell_checker.py index 6e36b7f2c5ee..28d8b3f3d202 100644 --- a/Tests/scripts/circleci_spell_checker.py +++ b/.circleci/circleci_spell_checker.py @@ -1,7 +1,7 @@ import re import sys -from Tests.scripts.spell_checker import spell_checker +from spell_checker import spell_checker from demisto_sdk.commands.common.tools import run_command, find_type from demisto_sdk.commands.common.constants import DESCRIPTION_REGEX, FileType diff --git a/Utils/comment_on_pr.py b/.circleci/comment_on_pr.py similarity index 100% rename from Utils/comment_on_pr.py rename to .circleci/comment_on_pr.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 9aafb154972b..08cbb7dd4e61 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -116,7 +116,7 @@ references: container_config: &container_config docker: - - image: devdemisto/content-build:3.0.0.49685 # disable-secrets-detection + - image: devdemisto/content-build:3.0.0.93625 # disable-secrets-detection auth: username: $DOCKERHUB_USER password: $DOCKERHUB_PASSWORD @@ -147,12 +147,9 @@ references: poetry --version # Check if CircleCI's config file and poetry files files are up to date # if poetry isn't up-to-date, checkout from origin/master. 
- ./Tests/scripts/is_file_up_to_date.sh .circleci/config.yml $CIRCLE_BRANCH - ./Tests/scripts/is_file_up_to_date.sh poetry.lock $CIRCLE_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh pyproject.toml $CIRCLE_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh Tests/Marketplace/core_packs_list.json $CIRCLE_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh Tests/Marketplace/core_packs_mpv2_list.json $CIRCLE_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh Tests/Marketplace/core_packs_xpanse_list.json $CIRCLE_BRANCH true + ./.circleci/is_file_up_to_date.sh .circleci/config.yml $CIRCLE_BRANCH + ./.circleci/is_file_up_to_date.sh poetry.lock $CIRCLE_BRANCH true + ./.circleci/is_file_up_to_date.sh pyproject.toml $CIRCLE_BRANCH true echo 'export CIRCLE_ARTIFACTS="/home/circleci/project/artifacts"' >> $BASH_ENV echo 'export PATH="/home/circleci/.local/bin:${PWD}/node_modules/.bin:${PATH}"' >> $BASH_ENV # disable-secrets-detection @@ -227,8 +224,8 @@ references: # poll for neo4j status until available while ! 
curl --fail http://127.0.0.1:7474 &> /dev/null; do sleep 1; done - ./Tests/scripts/linters_runner.sh - ./Tests/scripts/validate.sh + ./.circleci/linters_runner.sh + ./.circleci/validate.sh run_unit_testing_and_lint: &run_unit_testing_and_lint run: @@ -257,37 +254,22 @@ references: demisto-sdk lint -p 8 -g --test-xml ./unit-tests --log-path ./artifacts --failure-report ./artifacts --coverage-report $ARTIFACTS_FOLDER/coverage_report --docker-image << parameters.dockerimageflag >> --check-dependent-api-module - generate_coverage_reports: &generate_coverage_reports - run: - name: Generate coverage reports - when: always - no_output_timeout: 1h - command: | - EXIT_CODE=0 - if [[ -f $ARTIFACTS_FOLDER/coverage_report/.coverage ]]; then - demisto-sdk coverage-analyze -i $ARTIFACTS_FOLDER/coverage_report/.coverage --report-dir $ARTIFACTS_FOLDER/coverage_report --report-type all --previous-coverage-report-url https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json || EXIT_CODE=1 - # Checks if the $XSOAR_BOT_TEST_CONTENT exist. for security reasons only non forked pr's have access to it. 
- if [[ -n $XSOAR_BOT_TEST_CONTENT && -e $ARTIFACTS_FOLDER/coverage_report/html/index.html ]]; then - echo "Adding unit tests coverage comment to the pr" - python3 ./Tests/scripts/add_pr_comment.py - fi - exit $EXIT_CODE - fi - - infrastructure_testing: &infrastructure_testing - run: - name: Infrastructure testing - when: always - command: | - python3 -m pytest ./Tests/scripts/infrastructure_tests/ -v - python3 -m pytest ./Tests/Marketplace/Tests/ -v - python3 -m pytest ./Tests/tests -v - python3 -m pytest ./Tests/private_build/tests -v - python3 -m pytest Utils -v - - if [ -n "${DEMISTO_SDK_NIGHTLY}" ] ; then - ./Tests/scripts/sdk_pylint_check.sh - fi + # generate_coverage_reports: &generate_coverage_reports + # run: + # name: Generate coverage reports + # when: always + # no_output_timeout: 1h + # command: | + # EXIT_CODE=0 + # if [[ -f $ARTIFACTS_FOLDER/coverage_report/.coverage ]]; then + # demisto-sdk coverage-analyze -i $ARTIFACTS_FOLDER/coverage_report/.coverage --report-dir $ARTIFACTS_FOLDER/coverage_report --report-type all --previous-coverage-report-url https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json || EXIT_CODE=1 + # # Checks if the $XSOAR_BOT_TEST_CONTENT exist. for security reasons only non forked pr's have access to it. 
+ # if [[ -n $XSOAR_BOT_TEST_CONTENT && -e $ARTIFACTS_FOLDER/coverage_report/html/index.html ]]; then + # echo "Adding unit tests coverage comment to the pr" + # python3 ./.circleci/add_pr_comment.py + # fi + # exit $EXIT_CODE + # fi get_contribution_pack: &get_contribution_pack when: @@ -300,7 +282,7 @@ references: USERNAME=$(echo $CONTRIB_BRANCH | cut -d ":" -f 1) BRANCH=$(echo $CONTRIB_BRANCH | cut -d ":" -f 2) $CONTRIB_REPO="content" - python3 ./Utils/update_contribution_pack_in_base_branch.py -p $PULL_REQUEST_NUMBER -b $BRANCH -u $USERNAME -c $CONTRIB_REPO -gt $GITHUB_TOKEN + python3 ./.circleci/update_contribution_pack_in_base_branch.py -p $PULL_REQUEST_NUMBER -b $BRANCH -u $USERNAME -c $CONTRIB_REPO -gt $GITHUB_TOKEN comment_on_contrib_pr: &comment_on_contrib_pr when: @@ -311,7 +293,7 @@ references: when: always command: | SERVER_URL=$(jq -r 'select(.[].Role == "Server Master") | .[].InstanceDNS' $ENV_RESULTS_PATH) - python3 ./Utils/comment_on_pr.py -p $PULL_REQUEST_NUMBER -c "Instance is ready. Server link: https://$SERVER_URL, Build link: $CIRCLE_BUILD_URL" + python3 ./.circleci/comment_on_pr.py -p $PULL_REQUEST_NUMBER -c "Instance is ready. 
Server link: https://$SERVER_URL, Build link: $CIRCLE_BUILD_URL" nightly_jobs: &nightly_jobs - Setup Environment: @@ -356,9 +338,8 @@ jobs: - *install_node_ci - *install_neo4j - *prepare_environment - - *infrastructure_testing - *run_unit_testing_and_lint - - *generate_coverage_reports + # - *generate_coverage_reports - store_test_results: path: ./unit-tests - *store_artifacts @@ -378,14 +359,14 @@ jobs: - run: name: Spell Checks command: | - python3 ./Tests/scripts/circleci_spell_checker.py $CIRCLE_BRANCH + python3 ./.circleci/circleci_spell_checker.py $CIRCLE_BRANCH - run: name: Verify Base Branch for Contribution when: always command: | if [[ $CIRCLE_BRANCH =~ pull/[0-9]+ ]] ; then - python3 ./Tests/scripts/verify_base_branch_for_contribution.py $CIRCLE_BRANCH + python3 ./.circleci/verify_base_branch_for_contribution.py $CIRCLE_BRANCH fi - run: name: Validate landingPageSections.json @@ -399,7 +380,7 @@ jobs: UNZIP_PATH=$(mktemp -d) unzip $INDEX_PATH -d $UNZIP_PATH - python3 Tests/Marketplace/validate_landing_page_sections.py -i $UNZIP_PATH + python3 ./.circleci/validate_landing_page_sections.py -i $UNZIP_PATH - *store_artifacts - store_artifacts: path: $ARTIFACTS_FOLDER @@ -408,24 +389,6 @@ jobs: workflows: version: 2 - commit: - when: - matches: - # matching the environment variable << pipeline.git.branch >> to contributions branch pattern. 
- pattern: pull/[0-9]+ - value: << pipeline.git.branch >> - jobs: - - Setup Environment - - Run Unit Testing And Lint: - requires: - - Setup Environment - matrix: - parameters: - dockerimageflag: [ "native:ga", "native:maintenance", "native:dev", "native:candidate", "from-yml" ] - name: Run Unit Testing And Lint - Docker Image:<< matrix.dockerimageflag >> - - Run Validations: - requires: - - Setup Environment nightly: triggers: diff --git a/Utils/git_pull_master_into_fork.sh b/.circleci/git_pull_master_into_fork.sh similarity index 100% rename from Utils/git_pull_master_into_fork.sh rename to .circleci/git_pull_master_into_fork.sh diff --git a/Tests/scripts/is_file_up_to_date.sh b/.circleci/is_file_up_to_date.sh similarity index 92% rename from Tests/scripts/is_file_up_to_date.sh rename to .circleci/is_file_up_to_date.sh index 18572624424e..04986c75c84f 100755 --- a/Tests/scripts/is_file_up_to_date.sh +++ b/.circleci/is_file_up_to_date.sh @@ -4,7 +4,7 @@ BRANCH=$2 SHOULD_CHECKOUT=$3 if [[ -n $BRANCH ]]; then - BRANCH=$(git branch --show-current 2>/dev/null) || BRANCH=$(git rev-parse --head) + BRANCH=$(git branch --show-current 2>/dev/null) || BRANCH=$(git rev-parse --head) fi # Checks if there's any diff from master @@ -31,7 +31,7 @@ if [[ $(git diff origin/master -G"." -- ${FILE_TO_CHECK}) ]]; then fi if [[ $BRANCH =~ pull/[0-9]+ ]]; then - echo "Run ./Utils/git_pull_master_into_fork.sh or merge manually from upstream demisto content" + echo "Run ./.circleci/git_pull_master_into_fork.sh or merge manually from upstream demisto content" fi exit 1 diff --git a/.circleci/linters_runner.sh b/.circleci/linters_runner.sh new file mode 100755 index 000000000000..39507fa483c7 --- /dev/null +++ b/.circleci/linters_runner.sh @@ -0,0 +1,19 @@ +#!/bin/bash + + +# Run flake8 pylint and mypy on all non-Packs. Packs are handled in pre-commit. +errors=0 +all_dirs=$(find . 
-type d -not \( -path "*cache*" -o -path "./.*" -o -path "./Templates*" -o -path "./TestPlaybooks*" -o -path "./node_modules*" -o -path "./venv*" -o -path "./Packs*" -o -path "./artifacts*" -o -path "*infrastructure_tests*" -o -path "*scripts/awsinstancetool*" -o -path "./docs*" \)) +all_1_depth_dirs=$(find . -maxdepth 1 -type d -not \( -path "*cache*" -o -path . -o -path ./Packs -o -path ./venv -o -path ./Templates -o -path ./TestPlaybooks -o -path ./node_modules -o -path "./artifacts*" -o -path "./.*" -o -path ./docs \)) + +echo -e "Top level folders to scan (used by ruff):\n${all_1_depth_dirs}\n" +echo -e "Folders to be used for lint scan (used by pylint and mypy):\n${all_dirs}\n" + +./.circleci/mypy.sh $all_1_depth_dirs || errors=$? +python3 -m ruff $all_1_depth_dirs --select=E,F,PLC,PLE --ignore=PLC1901 || errors=$? + + +echo 'Linter exit code:' $errors +if [[ $errors -ne 0 ]]; then + exit 1 +fi diff --git a/Tests/scripts/mypy.sh b/.circleci/mypy.sh similarity index 100% rename from Tests/scripts/mypy.sh rename to .circleci/mypy.sh diff --git a/Tests/scripts/spell_checker.py b/.circleci/spell_checker.py similarity index 100% rename from Tests/scripts/spell_checker.py rename to .circleci/spell_checker.py diff --git a/Utils/update_contribution_pack_in_base_branch.py b/.circleci/update_contribution_pack_in_base_branch.py similarity index 82% rename from Utils/update_contribution_pack_in_base_branch.py rename to .circleci/update_contribution_pack_in_base_branch.py index 3ea378cdc244..669d8b9a1256 100755 --- a/Utils/update_contribution_pack_in_base_branch.py +++ b/.circleci/update_contribution_pack_in_base_branch.py @@ -5,6 +5,7 @@ from urllib.parse import urljoin import requests + PER_PAGE = 100 # value of `per_page` request parameter @@ -23,11 +24,15 @@ def main(): branch = args.branch github_token = args.github_token + print( + f"args received in Utils/update_contribution_pack_in_base_branch.py script: {pr_number=}, {username=}, {repo=}, {branch=}" + ) + 
packs_dir_names = get_files_from_github( username, branch, pr_number, repo, github_token ) if packs_dir_names: - print('Successfully updated the base branch ' + print('Successfully updated the base branch ' # noqa: T201 'with the following contrib packs: Packs/' f'{", Packs/".join(packs_dir_names)}') @@ -71,13 +76,19 @@ def get_files_from_github( Returns: A list of packs names, if found. """ + print("Getting files from Github") content_path = os.getcwd() + print(f"content_path: {content_path}") files_list = set() chunk_size = 1024 * 500 # 500 Kb base_url = f'https://raw.githubusercontent.com/{username}/{repo}/{branch}/' + print(f"base url: {base_url}") for file_path in get_pr_files(pr_number, github_token): + print(f"file_path: {file_path}") abs_file_path = os.path.join(content_path, file_path) + print(f"abs_file_path: {abs_file_path}") abs_dir = os.path.dirname(abs_file_path) + print(f"abs_dir: {abs_dir}") if not os.path.isdir(abs_dir): os.makedirs(abs_dir) with open(abs_file_path, "wb") as changed_file, requests.get( @@ -85,11 +96,13 @@ def get_files_from_github( stream=True, headers={"Authorization": f"Bearer {github_token}"}, ) as file_content: - file_content.raise_for_status() - for data in file_content.iter_content(chunk_size=chunk_size): + # mypy didn't like the request being used as context manager + file_content.raise_for_status() # type:ignore[attr-defined] + for data in file_content.iter_content(chunk_size=chunk_size): # type:ignore[attr-defined] changed_file.write(data) files_list.add(file_path.split(os.path.sep)[1]) + print(f"list(files_list): {list(files_list)}") return list(files_list) diff --git a/Tests/scripts/validate.sh b/.circleci/validate.sh similarity index 98% rename from Tests/scripts/validate.sh rename to .circleci/validate.sh index 6cf881817fd0..b640f5beaa61 100755 --- a/Tests/scripts/validate.sh +++ b/.circleci/validate.sh @@ -9,8 +9,8 @@ if [[ $CI_COMMIT_BRANCH = master ]] || [[ -n "${NIGHTLY}" ]] || [[ -n "${BUCKET_ 
PACKS_TO_UPLOAD_SPACED=${PACKS_TO_UPLOAD//,/ } for item in $PACKS_TO_UPLOAD_SPACED; do python3 -m demisto_sdk validate -i Packs/"$item" --post-commit --graph --skip-pack-dependencies --run-old-validate --skip-new-validate - done - else + done + else if [[ -n "${NIGHTLY}" && "${CI_COMMIT_BRANCH}" == "master" ]]; then PREV_VER=$LAST_UPLOAD_COMMIT else diff --git a/Tests/Marketplace/validate_landing_page_sections.py b/.circleci/validate_landing_page_sections.py similarity index 85% rename from Tests/Marketplace/validate_landing_page_sections.py rename to .circleci/validate_landing_page_sections.py index 5f29e7aef39b..c013dbc7552f 100644 --- a/Tests/Marketplace/validate_landing_page_sections.py +++ b/.circleci/validate_landing_page_sections.py @@ -4,8 +4,8 @@ import sys from glob import glob -from Tests.scripts.utils.log_util import install_logging -from Tests.scripts.utils import logging_wrapper as logging +import logging +logger = logging.getLogger(__file__) LANDING_PAGE_SECTIONS_PAGE_PATH = 'Tests/Marketplace/landingPage_sections.json' @@ -22,7 +22,7 @@ def main(): content_repo_pack_names = {os.path.basename(pack_name) for pack_name in glob('Packs/*')} valid_packs = bucket_pack_names | content_repo_pack_names validate_valid_packs_in_sections(landing_page_sections_json, valid_packs) - logging.success('Validation finished successfully') + logger.info('Validation finished successfully') def validate_valid_packs_in_sections(landing_page_sections_json: dict, valid_pack_names: set) -> None: @@ -32,7 +32,7 @@ def validate_valid_packs_in_sections(landing_page_sections_json: dict, valid_pac landing_page_sections_json: The content of the landingPage_sections.json file valid_pack_names: A set containing all valid pack names from latest index.zip file and content repo """ - logging.info('validating packs in sections appear in latest index.zip file') + logger.info('validating packs in sections appear in latest index.zip file') for section_name, packs_in_section in 
landing_page_sections_json.items(): if section_name in {'description', 'sections'}: continue @@ -49,7 +49,7 @@ def validate_file_keys(landing_page_sections_json: dict) -> None: Args: landing_page_sections_json: The content of the landingPage_sections.json file """ - logging.info('Validating file keys are valid sections') + logger.info('Validating file keys are valid sections') allowed_keys = {'description', 'sections'} allowed_keys.update(landing_page_sections_json['sections']) not_allowed_key = [key for key in landing_page_sections_json.keys() if key not in allowed_keys] @@ -62,10 +62,9 @@ def parse_landing_page_sections_to_json(): with open(LANDING_PAGE_SECTIONS_PAGE_PATH, 'r') as file: return json.load(file) except Exception: - logging.critical('Could not parse the file as json file') + logger.critical('Could not parse the file as json file') sys.exit(1) if __name__ in ("__main__", "__builtin__", "builtins"): - install_logging('ValidateLandingPageSections.log', logger=logging) main() diff --git a/Tests/scripts/verify_base_branch_for_contribution.py b/.circleci/verify_base_branch_for_contribution.py similarity index 100% rename from Tests/scripts/verify_base_branch_for_contribution.py rename to .circleci/verify_base_branch_for_contribution.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index c4dba753fe2c..23115520c85e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -9,19 +9,19 @@ /Tests/Marketplace/corepacks_override.json @yaakovpraisler # Docker native image -/Tests/docker_native_image_config.json @GuyAfik @JudahSchwartz @samuelFain +/Tests/docker_native_image_config.json @JudahSchwartz @samuelFain # Marketplace & Upload-Flow -/Tests/scripts/create_artifacts_graph/create_artifacts.py @ilaner +/Tests/scripts/create_artifacts_graph/create_artifacts.py @RosenbergYehuda /Tests/Marketplace/upload_git_snapshot.py @yaakovpraisler /Tests/Marketplace/install_packs.sh @yaakovpraisler /Tests/Marketplace/configure_and_install_packs.py @yaakovpraisler 
/Tests/Marketplace/copy_and_upload_packs.py @yaakovpraisler -/Tests/Marketplace/marketplace_services.py @yaakovpraisler @ilaner +/Tests/Marketplace/marketplace_services.py @yaakovpraisler @RosenbergYehuda /Tests/Marketplace/marketplace_statistics.py @yaakovpraisler /Tests/Marketplace/marketplace_constants.py @yaakovpraisler /Tests/Marketplace/zip_packs.py @yaakovpraisler -/Tests/Marketplace/upload_packs.py @yaakovpraisler @ilaner +/Tests/Marketplace/upload_packs.py @yaakovpraisler @RosenbergYehuda /Tests/Marketplace/packs_dependencies.py @yaakovpraisler /Tests/Marketplace/search_and_install_packs.py @yaakovpraisler /Tests/scripts/prepare_content_packs_for_testing.sh @yaakovpraisler @@ -31,18 +31,15 @@ /Utils/test_upload_flow/* @yaakovpraisler # Test Collection -/Tests/scripts/collect_tests @dorschw -/Tests/scripts/collect_tests/id_set.py @ilaner +/Tests/scripts/collect_tests @dorschw @michal-dagan /Tests/conf.json @sapirshuker # PANW Products /Packs/Palo_Alto_Networks_Enterprise_DLP/ @DeanArbel /Packs/PAN-OS/Integrations/ @jlevypaloalto -/Packs/PrismaCloudCompute/Integrations/ @GuyAfik -/Packs/PrismaSaasSecurity/Integrations/ @GuyAfik # Important Integrations -/Packs/QRadar/Integrations/QRadar_v3/* @ilaner +/Packs/QRadar/Integrations/QRadar_v3/* @jbabazadeh /Packs/Slack/Integrations/* @amshamah419 @rshunim /Packs/SplunkPy/Integrations/SplunkPy/* @ilappe /Packs/MicrosoftExchangeOnPremise/Integrations @amshamah419 @@ -60,9 +57,9 @@ /Packs/CommonScripts/Scripts/StixCreator/* @Ni-Knight # Common Packs -/Packs/CommonTypes/ @michalgold @idovandijk -/Packs/CommonPlaybooks/ @michalgold @idovandijk -/Packs/CommonDashboards/ @michalgold @idovandijk +/Packs/CommonTypes/ @altmannyarden @idovandijk +/Packs/CommonPlaybooks/ @altmannyarden @idovandijk +/Packs/CommonDashboards/ @altmannyarden @idovandijk /Packs/ContentManagement/ @mmhw /Packs/CommonTypes/IndicatorTypes/* @Ni-Knight /Packs/CommonTypes/Layouts/* @Ni-Knight @@ -83,7 +80,6 @@ .gitlab/* @yucohen .gitlab-ci.yml @yucohen 
/Tests/scripts/wait_in_line_for_cloud_env.sh @yucohen -.gitlab/ci/.gitlab-ci.staging.yml @ilaner /Tests/scripts/uninstall_packs_and_reset_bucket_cloud.sh @yucohen /Tests/Marketplace/search_and_uninstall_pack.py @yucohen /Tests/scripts/install_content_and_test_integrations.sh @yucohen @@ -94,24 +90,19 @@ Tests/scripts/test_modeling_rules.sh @AradCarmi Tests/scripts/lock_cloud_machines.py @yucohen Tests/Marketplace/server_content_items.json @dantavori -validation_config.toml @YuvHayun @JudahSchwartz @GuyAfik @anara123 +validation_config.toml @YuvHayun @JudahSchwartz @anara123 @SamuelFain # SDK Related -.gitlab/ci/.gitlab-ci.sdk-nightly.yml @dorschw -Utils/trigger_nightly_sdk_build.sh @dorschw -.pre-commit-config_template.yaml @dorschw @ilaner +.gitlab/ci/.gitlab-ci.sdk-nightly.yml @SamuelFain +.pre-commit-config_template.yaml @SamuelFain # XDR Related /Packs/CortexXDR/Integrations/ @maimorag -/Packs/Core/Integrations/ @dansterenson -/Packs/ApiModules/Scripts/CoreIRApiModule/* @dansterenson +/Packs/Core/Integrations/ @maimorag +/Packs/ApiModules/Scripts/CoreIRApiModule/* @maimorag # Dependencies -pyproject.toml @ilaner @dorschw -poetry.lock @ilaner @dorschw - -# Devcontainers -.devcontainer/* @ilaner +poetry.lock @dorschw # Demisto Class Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py @dantavori diff --git a/.github/content_roles.json b/.github/content_roles.json index db5fc0b93bbb..7151244b0281 100644 --- a/.github/content_roles.json +++ b/.github/content_roles.json @@ -1,14 +1,14 @@ { "CONTRIBUTION_REVIEWERS": [ - "anas-yousef", - "mmhw", - "maimorag" + "RotemAmit", + "jlevypaloalto", + "Shellyber" ], - "CONTRIBUTION_TL": "JasBeilin", + "CONTRIBUTION_TL": "BEAdi", "CONTRIBUTION_SECURITY_REVIEWER": "ssokolovich", "ON_CALL_DEVS": [ - "adaud", - "ilappe" + "acarmi", + "ypreisler" ], "DOC_REVIEWER": "ShirleyDenkberg", "TIM_REVIEWER": "MLainer1" diff --git a/.github/workflows/check-contributor-pack.yml 
b/.github/workflows/check-contributor-pack.yml index 7bd24985ce7f..02b6450e0eb0 100644 --- a/.github/workflows/check-contributor-pack.yml +++ b/.github/workflows/check-contributor-pack.yml @@ -34,5 +34,5 @@ jobs: echo "PR number is: $PR_NUMBER" echo "Target branch name is: $BRANCH_NAME" echo "Starting check of contributor packs" - poetry run python ./Utils/request_contributor_review.py --pr_number $PR_NUMBER --github_token $GITHUB_TOKEN --email_api_token $SENDGRID_EMAIL_API_KEY + poetry run python ./Utils/github_workflow_scripts/request_contributor_review.py --pr_number $PR_NUMBER --github_token $GITHUB_TOKEN --email_api_token $SENDGRID_EMAIL_API_KEY echo "Finished check of contributor packs" diff --git a/.github/workflows/pre-commit-reuse.yml b/.github/workflows/pre-commit-reuse.yml index e715acd49c30..fe8844ba08c1 100644 --- a/.github/workflows/pre-commit-reuse.yml +++ b/.github/workflows/pre-commit-reuse.yml @@ -87,6 +87,7 @@ jobs: steps.check-pytest-junit-exists.outputs.files_exists == 'true' && ! 
github.event.pull_request.head.repo.fork uses: MishaKav/pytest-coverage-comment@main + continue-on-error: true # may fail on output > 65k chars with: pytest-xml-coverage-path: coverage_report/coverage.xml junitxml-path: .report_pytest.xml diff --git a/.github/workflows/project_manager_daily.yml b/.github/workflows/project_manager_daily.yml index a895af417068..a59f52203189 100644 --- a/.github/workflows/project_manager_daily.yml +++ b/.github/workflows/project_manager_daily.yml @@ -2,9 +2,6 @@ name: Manage Contribution Board - Daily on: schedule: - cron: "0 0 * * *" -permissions: - contents: read - jobs: manage_project_board: runs-on: ubuntu-latest @@ -18,9 +15,9 @@ jobs: - name: Get project manager run: | pip install --upgrade pip - pip install github-automation + pip install github-automation==0.2.3 - name: Manage project run: | github-automation manage -c .github/project_conf/contributions.ini env: - GITHUB_TOKEN: ${{ secrets.CONTENTBOT_BOARD_UPDATE_TOKEN }} + GITHUB_TOKEN: ${{ secrets.CONTENTBOT_GH_ADMIN_TOKEN }} diff --git a/.github/workflows/project_manager_hourly.yml b/.github/workflows/project_manager_hourly.yml index 40312134d219..dfc4b8ac8d65 100644 --- a/.github/workflows/project_manager_hourly.yml +++ b/.github/workflows/project_manager_hourly.yml @@ -2,8 +2,6 @@ name: Manage Contribution Board - Hourly on: schedule: - cron: "0 * * * *" -permissions: - contents: read jobs: manage_project_board: @@ -18,9 +16,9 @@ jobs: - name: Get project manager run: | pip install --upgrade pip - pip install github-automation + pip install github-automation==0.2.3 - name: Manage project run: | github-automation manage -c .github/project_conf/contributions.ini env: - GITHUB_TOKEN: ${{ secrets.CONTENTBOT_BOARD_UPDATE_TOKEN }} + GITHUB_TOKEN: ${{ secrets.CONTENTBOT_GH_ADMIN_TOKEN }} diff --git a/.github/workflows/protect-infra-directories.yml b/.github/workflows/protect-infra-directories.yml index 1a4762bf9a3c..346e8920a034 100644 --- 
a/.github/workflows/protect-infra-directories.yml +++ b/.github/workflows/protect-infra-directories.yml @@ -25,4 +25,4 @@ jobs: - name: Check for changes in protected directories run: | - python Utils/check_protected_directories.py ${{ steps.changed-files.outputs.all_changed_files }} + python Utils/github_workflow_scripts/check_protected_directories.py ${{ steps.changed-files.outputs.all_changed_files }} diff --git a/.gitignore b/.gitignore index 8553f5cf0c14..507298122772 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,7 @@ Integrations/*/*_unified.yml Beta_Integrations/*/*_unified.yml Packs/*/*/*/*_unified.yml conftest.py -!Tests/scripts/dev_envs/pytest/conftest.py +!Tests/scripts/pytest/conftest.py !Tests/tests_e2e/conftest.py venv failed_unittests.txt diff --git a/.gitlab/ci/.gitlab-ci.bucket-upload.yml b/.gitlab/ci/.gitlab-ci.bucket-upload.yml deleted file mode 100644 index 799390936854..000000000000 --- a/.gitlab/ci/.gitlab-ci.bucket-upload.yml +++ /dev/null @@ -1,859 +0,0 @@ -.bucket-upload-rule: - rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$BUCKET_UPLOAD == "true" && $FORCE_BUCKET_UPLOAD == "false"' - -.bucket-upload-rule-always: - rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$BUCKET_UPLOAD == "true" && $FORCE_BUCKET_UPLOAD == "false"' - when: always - - -.check_user_permissions_to_upload_packs: &check_user_permissions_to_upload_packs - - section_start "Check User Permissions to Upload Packs" # if bucket upload and uploading to marketplace-dist - - | - if [[ -n "${BUCKET_UPLOAD}" || -n "${FORCE_BUCKET_UPLOAD}" ]] && [[ "$GCS_MARKET_BUCKET" == "$GCS_PRODUCTION_BUCKET" ]]; then - CONTENT_LEADERS=$(curl -sS "https://api.github.com/orgs/demisto/teams/content-leaders/members" -H "Authorization: token ${GITHUB_TOKEN}") - echo "received content leaders" - LEADER_NAMES=$(echo $CONTENT_LEADERS | jq -r ".[].login") - LEADER_NAMES=$(echo "${LEADER_NAMES}" "content-bot" "svc -xsoar-gitlab-mirror" 
"svc-xsoar-gitlab-mirror" "${USERS_ALLOWED_TRIGGER_UPLOAD}" ) - if [[ -z "$GITLAB_USER_NAME" ]] || [[ -z "`echo $LEADER_NAMES | grep -w "$GITLAB_USER_NAME"`" ]]; then - echo -e "User '$GITLAB_USER_NAME' is not allowed to trigger this build, only one of:\n${LEADER_NAMES}" - job-done - exit 1 - else - echo "User '${GITLAB_USER_NAME}' is allowed to upload packs / force upload packs." - fi - fi - - section_end "Check User Permissions to Upload Packs" - -.upload_content_graph: &upload_content_graph - - | - if [[ $TEST_UPLOAD == "false" ]]; then - section_start "Upload content graph GraphML to GCP" --collapsed - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/content_graph/${MARKETPLACE_VERSION}.zip" "gs://$GCS_MARKET_BUCKET_DEV/content_graph/$MARKETPLACE_VERSION.zip" - # copy the packs.json file to the bucket, used in contribution management - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs.json" "gs://${GCS_MARKET_BUCKET_DEV}/content_graph/${MARKETPLACE_VERSION}_packs.json" - section_end "Upload content graph GraphML to GCP" - fi - -.upload_dependencies_file: &upload_dependencies_file - - | - if [[ $TEST_UPLOAD == "false" ]]; then - section_start "Upload packs_dependencies.json to GCP" --collapsed - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" "gs://xsoar-ci-artifacts/content-cache-docs/$MARKETPLACE_VERSION/packs_dependencies.json" - section_end "Upload packs_dependencies.json to GCP" - fi - -run-validations-upload-flow: - variables: - DEMISTO_SDK_GRAPH_FORCE_CREATE: "true" - extends: - - .run-validations - - .bucket-upload-rule - -run-validations-upload-flow-new-validate-flow: - variables: - DEMISTO_SDK_GRAPH_FORCE_CREATE: "true" - extends: - - .run-validations-new-validate-flow - - .bucket-upload-rule - -run-pre-commit-upload-flow: - cache: - policy: push - extends: - - .run-pre-commit - - .bucket-upload-rule - 
-jobs-done-check-upload-flow: - extends: - - .jobs-done-check - - .bucket-upload-rule - needs: ['run-pre-commit-upload-flow', 'run-validations-upload-flow', 'mpv2-prepare-testing-bucket-upload-flow', 'upload-id-set-bucket', 'xpanse-prepare-testing-bucket-upload-flow', 'xsoar-prepare-testing-bucket-upload-flow', 'xsoar-saas-prepare-testing-bucket-upload-flow', 'install-packs-in-server6_9', 'install-packs-in-server6_10', 'install-packs-in-server6_11', 'install-packs-in-server6_12', 'install-packs-in-server-master', 'install-packs-in-xsiam-ga', 'sync-buckets-between-projects', 'upload-packs-to-marketplace', 'upload-packs-to-marketplace-v2', 'upload-packs-to-xpanse-marketplace', 'upload-packs-to-xsoar-saas-marketplace'] - tags: - - gke - variables: - WORKFLOW: 'Upload Packs to Marketplace Storage' - - - -xsoar-prepare-testing-bucket-upload-flow: - extends: - - xsoar-prepare-testing-bucket - variables: - IFRA_ENV_TYPE: "Bucket-Upload" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$BUCKET_UPLOAD == "true"' - - if: '$FORCE_BUCKET_UPLOAD == "true"' - - -xsoar-saas-prepare-testing-bucket-upload-flow: - extends: - - xsoar-saas-prepare-testing-bucket - variables: - IFRA_ENV_TYPE: "Bucket-Upload" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$BUCKET_UPLOAD == "true"' - - if: '$FORCE_BUCKET_UPLOAD == "true"' - - -mpv2-prepare-testing-bucket-upload-flow: - extends: - - mpv2-prepare-testing-bucket - variables: - IFRA_ENV_TYPE: "Bucket-Upload" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$BUCKET_UPLOAD == "true"' - - if: '$FORCE_BUCKET_UPLOAD == "true"' - - -xpanse-prepare-testing-bucket-upload-flow: - extends: - - xpanse-prepare-testing-bucket - variables: - IFRA_ENV_TYPE: "Bucket-Upload" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XPANSE}" - 
rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$BUCKET_UPLOAD == "true"' - - if: '$FORCE_BUCKET_UPLOAD == "true"' - - -.install_packs_in_xsoar_server: - tags: - - gke - needs: ["xsoar-prepare-testing-bucket-upload-flow"] - stage: run-instances - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - variables: - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_XSOAR}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - INSTANCE_CREATED: "true" - SSH_TUNNEL_TIMEOUT: 10 - TIME_TO_LIVE: "" - extends: - - .bucket-upload-rule - - .default-job-settings - script: - - EXIT_CODE=0 - - !reference [.download-demisto-conf] - - section_start "Secrets Fetch" --collapsed - - SECRET_CONF_PATH=$(cat secret_conf_path) - - python3 ./Tests/scripts/add_secrets_file_to_build.py -sa "$GSM_SERVICE_ACCOUNT" -sf "$SECRET_CONF_PATH" -u "$DEMISTO_USERNAME" -p "$DEMISTO_PASSWORD" --gsm_project_id_dev "$GSM_PROJECT_ID_DEV" --gsm_project_id_prod "$GSM_PROJECT_ID" >> $ARTIFACTS_FOLDER/logs/handle_secrets.log - - section_end "Secrets Fetch" - - !reference [.ssh-config-setup] - - section_start "Check if should run Instance role" - - export INSTANCES_CREATED_FOR_ROLE=$(cat "${ENV_RESULTS_PATH}" | jq -c "map(select(.Role == \"${INSTANCE_ROLE}\")) | length") - - | - echo "Instance role:${INSTANCE_ROLE} Product type:${PRODUCT_TYPE} Instances created for role:${INSTANCES_CREATED_FOR_ROLE}" - if [[ "${INSTANCES_CREATED_FOR_ROLE}" -eq 0 ]]; then - echo "Instances with role ${INSTANCE_ROLE} were not created, not running the instance flow." 
- rm -f "${ARTIFACTS_FOLDER_INSTANCE}/instance_role.txt" # delete the instance_role.txt file so the job will not be collected by slack notifier. - job-done - exit 0 - fi - - section_end "Check if should run Instance role" - - section_start "Get Instance Variables" - - echo INSTANCE_ROLE="$INSTANCE_ROLE" - - echo INSTANCE_CREATED="$INSTANCE_CREATED" - - section_end "Get Instance Variables" - - section_start "Wait Until Server Ready" - - | - [ -n "${NIGHTLY}" ] && IS_NIGHTLY=true || IS_NIGHTLY=false - python3 ./Tests/scripts/wait_until_server_ready.py -n ${IS_NIGHTLY} --instance-role "${INSTANCE_ROLE}" || EXIT_CODE=$? - - section_end "Wait Until Server Ready" - - section_start "Install Packs" - - ./Tests/Marketplace/install_packs.sh || EXIT_CODE=$? - - section_end "Install Packs" - - section_start "Get instance ssh-command" - - echo "INSTANCE_ROLE -> ${INSTANCE_ROLE}" - - INSTANCE_NAME=$(jq -r --arg role "$INSTANCE_ROLE" '.[] | select(.Role == $role) | .InstanceName' $ENV_RESULTS_PATH) - - echo -e "\e[1m gcloud compute ssh --zone \"us-central1-a\" \"${INSTANCE_NAME}\" --tunnel-through-iap --project "xsoar-content-build" \e[0m" - - section_end "Get instance ssh-command" - - job-done - - exit "${EXIT_CODE}" - after_script: - - !reference [.default-after-script] - - !reference [.install_ssh_keys] - - !reference [.ssh-config-setup] - - !reference [.destroy_xsoar_instances] - - -install-packs-in-server6_9: - extends: .install_packs_in_xsoar_server - variables: - INSTANCE_ROLE: "Server 6.9" - -install-packs-in-server6_10: - extends: .install_packs_in_xsoar_server - variables: - INSTANCE_ROLE: "Server 6.10" - -install-packs-in-server6_11: - extends: .install_packs_in_xsoar_server - variables: - INSTANCE_ROLE: "Server 6.11" - -install-packs-in-server6_12: - extends: .install_packs_in_xsoar_server - variables: - INSTANCE_ROLE: "Server 6.12" - -install-packs-in-server-master: - extends: .install_packs_in_xsoar_server - variables: - INSTANCE_ROLE: "Server Master" - - 
-.install-mpv2-packs-on-xsiam-instances: - tags: - - gke - needs: ["mpv2-prepare-testing-bucket-upload-flow"] - stage: run-instances - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - rules: - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: $TEST_UPLOAD == "true" && $BUCKET_UPLOAD == "true" && $FORCE_BUCKET_UPLOAD == "false" - when: always - variables: - CLOUD_MACHINES_TYPE: "build" - GCS_LOCKS_PATH: "content-locks/locks-xsiam-ga" - - if: '$BUCKET_UPLOAD == "true" && $FORCE_BUCKET_UPLOAD == "false"' - variables: - PRODUCT_TYPE: "XSIAM" - SERVER_TYPE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_MPV2}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - INSTANCE_CREATED: "true" - TIME_TO_LIVE: "" - GCS_LOCKS_PATH: "content-locks/locks-xsiam-ga-upload" - CLOUD_MACHINES_TYPE: "upload" - CLOUD_MACHINES_COUNT: 1 - extends: - - .default-job-settings - script: - - EXIT_CODE=0 - - !reference [.download-demisto-conf] - - section_start "Secrets Fetch" --collapsed - - SECRET_CONF_PATH=$(cat secret_conf_path) - - python3 ./Tests/scripts/add_secrets_file_to_build.py -sa "$GSM_SERVICE_ACCOUNT" -sf "$SECRET_CONF_PATH" -u "$DEMISTO_USERNAME" -p "$DEMISTO_PASSWORD" --gsm_project_id_dev "$GSM_PROJECT_ID_DEV" --gsm_project_id_prod "$GSM_PROJECT_ID" >> $ARTIFACTS_FOLDER/logs/handle_secrets.log - - section_end "Secrets Fetch" - - section_start "Lock Machine" - - echo "Authenticating GCP" - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - echo "Auth done successfully" - - ./Tests/scripts/wait_in_line_for_cloud_env.sh "$CLOUD_MACHINES_TYPE" - - source CloudEnvVariables - - echo "CLOUD Chosen machine ids 
are:${CLOUD_CHOSEN_MACHINE_IDS}" - - section_end "Lock Machine" - - - CLOUD_SERVERS_PATH=$(cat $CLOUD_SERVERS_FILE) - - cat "${CLOUD_API_KEYS}" > "cloud_api_keys.json" - - !reference [.uninstall-packs-and-reset-bucket-cloud] - - - section_start "Run XSIAM end to end sanity tests" - - ./Tests/scripts/run_e2e_tests.sh || EXIT_CODE=$? - - section_end "Run XSIAM end to end sanity tests" - - - section_start "Get Instance Variables" - - echo INSTANCE_ROLE="$INSTANCE_ROLE" - - echo INSTANCE_CREATED="$INSTANCE_CREATED" - - section_end "Get Instance Variables" - - - section_start "Install Packs" - - ./Tests/Marketplace/install_packs.sh || EXIT_CODE=$? - - section_end "Install Packs" - - - job-done - - exit "$EXIT_CODE" - after_script: - - source .gitlab/helper_functions.sh - - !reference [ .unlock-machine ] - -install-packs-in-xsiam-ga: - extends: .install-mpv2-packs-on-xsiam-instances - variables: - INSTANCE_ROLE: "XSIAM" - GCS_QUEUE_FILE: "queue-ga" - GCS_SOURCE_BUCKET: "$GCS_PRODUCTION_V2_BUCKET" - GCS_MACHINES_BUCKET: "marketplace-v2-dist-dev/upload-flow/builds-xsiam" - CLOUD_SERVERS_FILE: "xsiam_servers_path" - CLOUD_API_KEYS: $XSIAM_API_KEYS - CLOUD_API_TOKENS: $XSIAM_TOKENS - NON_REMOVABLE_PACKS: "Base" - - -upload-packs-to-marketplace: - tags: - - gke - needs: ["run-validations-upload-flow", "install-packs-in-server6_9", "install-packs-in-server6_10", "install-packs-in-server6_11", "install-packs-in-server6_12", "install-packs-in-server-master", "run-pre-commit-upload-flow"] - stage: upload-to-marketplace - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - variables: - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - INSTANCE_ROLE: "Server Master" - MARKETPLACE_VERSION: "xsoar" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - 
INSTANCE_CREATED: "true" - SSH_TUNNEL_TIMEOUT: 10 - TIME_TO_LIVE: "" - extends: - - .bucket-upload-rule - - .default-job-settings - script: - - !reference [.ssh-config-setup] - - *check_user_permissions_to_upload_packs - - section_start "Upload Packs To Marketplace Storage" - - | - if [[ "$CI_COMMIT_BRANCH" == "master" ]] || [[ "$GCS_MARKET_BUCKET" != "$GCS_PRODUCTION_BUCKET" ]]; then - EXTRACT_FOLDER=$(mktemp -d) - PACK_ARTIFACTS="${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs.zip" - PACKS_DEPENDENCIES="${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" - CI_COMMIT_BRANCH=${CI_COMMIT_BRANCH:-unknown} - GCS_BUILD_BUCKET="marketplace-ci-build" - if [[ $GCS_MARKET_BUCKET == $GCS_PRODUCTION_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="content" - fi - - if [[ $GCS_MARKET_BUCKET != $GCS_PRODUCTION_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - - if [[ -z "${PACKS_TO_UPLOAD}" ]]; then - PACKS_TO_UPLOAD="All" - fi - python3 ./Tests/Marketplace/copy_and_upload_packs.py -a "${PACK_ARTIFACTS}" -e $EXTRACT_FOLDER -pb "$GCS_MARKET_BUCKET" -bb "$GCS_BUILD_BUCKET" -s $GCS_MARKET_KEY -n $CI_PIPELINE_ID -c $CI_COMMIT_BRANCH -p "${PACKS_TO_UPLOAD}" -pbp "$STORAGE_BASE_PATH/packs" --marketplace xsoar - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - if [[ $TEST_UPLOAD == "false" ]] && [[ -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" ]]; then - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" "gs://xsoar-ci-artifacts/content/$CI_COMMIT_SHA/$MARKETPLACE_VERSION/packs_results_upload.json" - echo "packs_results_upload.json upload successfully" - fi - - core_packs_files_count=$(find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" | wc -l) - if [ "${core_packs_files_count}" -eq 0 ]; then - echo "No core packs files were found, skipping uploading." 
- else - echo "Uploading ${core_packs_files_count} core packs files." - # Copy core packs files from the artifacts folder to the build bucket: - find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" -exec gsutil cp -z json "{}" "gs://$GCS_MARKET_BUCKET/$STORAGE_BASE_PATH/packs" \; - echo "Successfully uploaded core packs files." - fi - fi - - section_end "Upload Packs To Marketplace Storage" - - - section_start "Download packs from GCP" - - | - if [[ $GCS_MARKET_BUCKET == $GCS_PRODUCTION_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="content" - fi - if [[ $GCS_MARKET_BUCKET != $GCS_PRODUCTION_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - - PACKS_SRC="gs://$GCS_MARKET_BUCKET/$STORAGE_BASE_PATH/packs" - ZIP_FOLDER=$(mktemp -d) - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - echo "successfully activated google cloud service account" - gsutil -m cp -r $PACKS_SRC $ZIP_FOLDER - echo "successfully downloaded index.zip" - - section_end "Download packs from GCP" - - *upload_content_graph - - *upload_dependencies_file - - section_start "Revoking GCP Auth" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - section_end "Revoking GCP Auth" - - - section_start "Zip Content Packs From GCS" - - python3 ./Tests/Marketplace/zip_packs.py -z $ZIP_FOLDER -a $ARTIFACTS_FOLDER -s $GCS_MARKET_KEY - - section_end "Zip Content Packs From GCS" - - - job-done - -upload-packs-to-marketplace-v2: - tags: - - gke - needs: ["run-validations-upload-flow", "run-pre-commit-upload-flow", "mpv2-prepare-testing-bucket-upload-flow", "install-packs-in-xsiam-ga"] # "install-packs-in-xsiam-dev" - stage: upload-to-marketplace - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always 
- variables: - MARKETPLACE_VERSION: "marketplacev2" - INSTANCE_ROLE: "XSIAM" - PRODUCT_TYPE: "XSIAM" - SERVER_TYPE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - INSTANCE_CREATED: "true" - SSH_TUNNEL_TIMEOUT: 10 - TIME_TO_LIVE: "" - extends: - - .bucket-upload-rule - - .default-job-settings - script: - - *check_user_permissions_to_upload_packs - - section_start "Upload Packs To Marketplace Storage" - - | - if [[ "$CI_COMMIT_BRANCH" == "master" ]] || [[ "$GCS_MARKET_V2_BUCKET" != "$GCS_PRODUCTION_V2_BUCKET" ]]; then - EXTRACT_FOLDER=$(mktemp -d) - PACK_ARTIFACTS="${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs.zip" - PACKS_DEPENDENCIES="${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" - CI_COMMIT_BRANCH=${CI_COMMIT_BRANCH:-unknown} - GCS_BUILD_BUCKET="marketplace-ci-build" - if [[ -z $STORAGE_BASE_PATH ]]; then - if [[ $GCS_MARKET_V2_BUCKET == $GCS_PRODUCTION_V2_BUCKET ]]; then - STORAGE_BASE_PATH="content" - else - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - echo "Set storage base path to $STORAGE_BASE_PATH" - fi - if [[ -z "${PACKS_TO_UPLOAD}" ]]; then - PACKS_TO_UPLOAD="All" - fi - python3 ./Tests/Marketplace/copy_and_upload_packs.py -a "${PACK_ARTIFACTS}" -e $EXTRACT_FOLDER -pb "$GCS_MARKET_V2_BUCKET" -bb "$GCS_BUILD_BUCKET" -s $GCS_MARKET_KEY -n $CI_PIPELINE_ID -c $CI_COMMIT_BRANCH -p "${PACKS_TO_UPLOAD}" -pbp "$STORAGE_BASE_PATH/packs" --marketplace marketplacev2 - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - if [[ $TEST_UPLOAD == "false" ]] && [[ -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" ]]; then - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" "gs://xsoar-ci-artifacts/content/$CI_COMMIT_SHA/$MARKETPLACE_VERSION/packs_results_upload.json" - echo "packs_results_upload.json upload successfully" 
- fi - - core_packs_files_count=$(find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" | wc -l) - if [ "${core_packs_files_count}" -eq 0 ]; then - echo "No core packs files were found, skipping uploading." - else - echo "Uploading ${core_packs_files_count} core packs files." - # Copy core packs files from the artifacts folder to the build bucket: - find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" -exec gsutil cp -z json "{}" "gs://$GCS_MARKET_V2_BUCKET/$STORAGE_BASE_PATH/packs" \; - echo "Successfully uploaded core packs files." - fi - fi - - section_end "Upload Packs To Marketplace Storage" - - - section_start "Download packs from GCP" - - | - if [[ $GCS_MARKET_V2_BUCKET == $GCS_PRODUCTION_V2_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="content" - fi - if [[ $GCS_MARKET_V2_BUCKET != $GCS_PRODUCTION_V2_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - - PACKS_SRC="gs://$GCS_MARKET_V2_BUCKET/$STORAGE_BASE_PATH/packs" - ZIP_FOLDER=$(mktemp -d) - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - echo "successfully activated google cloud service account" - gsutil -m cp -r $PACKS_SRC $ZIP_FOLDER - echo "successfully downloaded index.zip" - - section_end "Download packs from GCP" - - *upload_content_graph - - *upload_dependencies_file - - section_start "Revoking GCP Auth" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - section_end "Revoking GCP Auth" - - section_start "Zip Content Packs From GCS" - - python3 ./Tests/Marketplace/zip_packs.py -z $ZIP_FOLDER -a $ARTIFACTS_FOLDER -s $GCS_MARKET_KEY - - section_end "Zip Content Packs From GCS" - - job-done - -upload-packs-to-xpanse-marketplace: - tags: - - gke - needs: ["run-validations-upload-flow", "run-pre-commit-upload-flow", 
"xpanse-prepare-testing-bucket-upload-flow"] # "install-packs-in-xpanse-dev" - stage: upload-to-marketplace - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - variables: - MARKETPLACE_VERSION: "xpanse" - INSTANCE_ROLE: "XPANSE" - PRODUCT_TYPE: "XPANSE" - SERVER_TYPE: "XPANSE" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XPANSE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XPANSE}/server_type_${SERVER_TYPE}" - INSTANCE_CREATED: "true" - SSH_TUNNEL_TIMEOUT: 10 - TIME_TO_LIVE: "" - extends: - - .bucket-upload-rule - - .default-job-settings - script: - - *check_user_permissions_to_upload_packs - - section_start "Upload Packs To Marketplace Storage" - - | - if [[ "$CI_COMMIT_BRANCH" == "master" ]] || [[ "$GCS_MARKET_XPANSE_BUCKET" != "$GCS_PRODUCTION_XPANSE_BUCKET" ]]; then - EXTRACT_FOLDER=$(mktemp -d) - PACK_ARTIFACTS="${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs.zip" - PACKS_DEPENDENCIES="${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" - CI_COMMIT_BRANCH=${CI_COMMIT_BRANCH:-unknown} - GCS_BUILD_BUCKET="marketplace-ci-build" - if [[ -z $STORAGE_BASE_PATH ]]; then - if [[ $GCS_MARKET_XPANSE_BUCKET == $GCS_PRODUCTION_XPANSE_BUCKET ]]; then - STORAGE_BASE_PATH="content" - else - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - echo "Set storage base path to $STORAGE_BASE_PATH" - fi - if [[ -z "${PACKS_TO_UPLOAD}" ]]; then - PACKS_TO_UPLOAD="All" - fi - python3 ./Tests/Marketplace/copy_and_upload_packs.py -a "${PACK_ARTIFACTS}" -e $EXTRACT_FOLDER -pb "$GCS_MARKET_XPANSE_BUCKET" -bb "$GCS_BUILD_BUCKET" -s $GCS_MARKET_KEY -n $CI_PIPELINE_ID -c $CI_COMMIT_BRANCH -p "${PACKS_TO_UPLOAD}" -pbp "$STORAGE_BASE_PATH/packs" --marketplace xpanse - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - if [[ $TEST_UPLOAD == "false" ]] && [[ -f 
"${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" ]]; then - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" "gs://xsoar-ci-artifacts/content/$CI_COMMIT_SHA/$MARKETPLACE_VERSION/packs_results_upload.json" - echo "packs_results_upload.json upload successfully" - fi - - core_packs_files_count=$(find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" | wc -l) - if [ "${core_packs_files_count}" -eq 0 ]; then - echo "No core packs files were found, skipping uploading." - else - echo "Uploading ${core_packs_files_count} core packs files." - # Copy core packs files from the artifacts folder to the build bucket: - find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" -exec gsutil cp -z json "{}" "gs://$GCS_MARKET_XPANSE_BUCKET/$STORAGE_BASE_PATH/packs" \; - echo "Successfully uploaded core packs files." - fi - fi - - section_end "Upload Packs To Marketplace Storage" - - - section_start "Download packs from GCP" - - | - if [[ $GCS_MARKET_XPANSE_BUCKET == $GCS_PRODUCTION_XPANSE_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="content" - fi - if [[ $GCS_MARKET_XPANSE_BUCKET != $GCS_PRODUCTION_XPANSE_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - - PACKS_SRC="gs://$GCS_MARKET_XPANSE_BUCKET/$STORAGE_BASE_PATH/packs" - ZIP_FOLDER=$(mktemp -d) - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - echo "successfully activated google cloud service account" - gsutil -m cp -r $PACKS_SRC $ZIP_FOLDER - echo "successfully downloaded index.zip" - - section_end "Download packs from GCP" - - *upload_content_graph - - *upload_dependencies_file - - section_start "Revoking GCP Auth" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - section_end "Revoking GCP Auth" - - - section_start "Zip Content Packs From GCS" - 
- python3 ./Tests/Marketplace/zip_packs.py -z $ZIP_FOLDER -a $ARTIFACTS_FOLDER -s $GCS_MARKET_KEY - - section_end "Zip Content Packs From GCS" - - job-done - -upload-packs-to-xsoar-saas-marketplace: - tags: - - gke - needs: ["run-validations-upload-flow", "run-pre-commit-upload-flow", "xsoar-saas-prepare-testing-bucket-upload-flow"] # "install-packs-in-xsoar-saas-dev" - stage: upload-to-marketplace - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - variables: - MARKETPLACE_VERSION: "xsoar_saas" - INSTANCE_ROLE: "xsoar_saas" - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR SAAS" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - INSTANCE_CREATED: "true" - SSH_TUNNEL_TIMEOUT: 10 - TIME_TO_LIVE: "" - extends: - - .bucket-upload-rule - - .default-job-settings - script: - - *check_user_permissions_to_upload_packs - - section_start "Upload Packs To Marketplace Storage" - - | - echo "$GCS_MARKET_XSOAR_SAAS_BUCKET" - echo "$GCS_PRODUCTION_XSOAR_SAAS_BUCKET" - if [[ "$CI_COMMIT_BRANCH" == "master" ]] || [[ "$GCS_MARKET_XSOAR_SAAS_BUCKET" != "$GCS_PRODUCTION_XSOAR_SAAS_BUCKET" ]]; then - EXTRACT_FOLDER=$(mktemp -d) - PACK_ARTIFACTS="${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs.zip" - PACKS_DEPENDENCIES="${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" - CI_COMMIT_BRANCH=${CI_COMMIT_BRANCH:-unknown} - GCS_BUILD_BUCKET="marketplace-ci-build" - if [[ -z $STORAGE_BASE_PATH ]]; then - if [[ "$GCS_MARKET_XSOAR_SAAS_BUCKET" == "$GCS_PRODUCTION_XSOAR_SAAS_BUCKET" ]]; then - STORAGE_BASE_PATH="content" - else - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - echo "Set storage base path to $STORAGE_BASE_PATH" - fi - python3 ./Tests/Marketplace/copy_and_upload_packs.py -a "${PACK_ARTIFACTS}" -e $EXTRACT_FOLDER -pb "$GCS_MARKET_XSOAR_SAAS_BUCKET" -bb "$GCS_BUILD_BUCKET" 
-s $GCS_MARKET_KEY -n $CI_PIPELINE_ID -c $CI_COMMIT_BRANCH -pbp "$STORAGE_BASE_PATH/packs" --marketplace xsoar_saas - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - if [[ $TEST_UPLOAD == "false" ]] && [[ -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" ]]; then - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_results_upload.json" "gs://xsoar-ci-artifacts/content/$CI_COMMIT_SHA/$MARKETPLACE_VERSION/packs_results_upload.json" - echo "packs_results_upload.json upload successfully" - fi - - core_packs_files_count=$(find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" | wc -l) - if [ "${core_packs_files_count}" -eq 0 ]; then - echo "No core packs files were found, skipping uploading." - else - echo "Uploading ${core_packs_files_count} core packs files." - # Copy core packs files from the artifacts folder to the build bucket: - find "${ARTIFACTS_FOLDER_SERVER_TYPE}" -name "corepacks*.json" -exec gsutil cp -z json "{}" "gs://$GCS_MARKET_XSOAR_SAAS_BUCKET/$STORAGE_BASE_PATH/packs" \; - echo "Successfully uploaded core packs files." 
- fi - fi - - section_end "Upload Packs To Marketplace Storage" - - - section_start "Download packs from GCP" - - | - if [[ $GCS_MARKET_XSOAR_SAAS_BUCKET == $GCS_PRODUCTION_XSOAR_SAAS_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="content" - fi - if [[ $GCS_MARKET_XSOAR_SAAS_BUCKET != $GCS_PRODUCTION_XSOAR_SAAS_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - - PACKS_SRC="gs://$GCS_MARKET_XSOAR_SAAS_BUCKET/$STORAGE_BASE_PATH/packs" - ZIP_FOLDER=$(mktemp -d) - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - echo "successfully activated google cloud service account" - gsutil -m cp -r $PACKS_SRC $ZIP_FOLDER - echo "successfully downloaded index.zip" - - section_end "Download packs from GCP" - - *upload_content_graph - - *upload_dependencies_file - - section_start "Revoking GCP Auth" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - section_end "Revoking GCP Auth" - - - section_start "Zip Content Packs From GCS" - - python3 ./Tests/Marketplace/zip_packs.py -z $ZIP_FOLDER -a $ARTIFACTS_FOLDER -s $GCS_MARKET_KEY - - section_end "Zip Content Packs From GCS" - - job-done - -xsoar-force-pack-upload: - needs: [ "xsoar-prepare-testing-bucket-upload-flow" ] - variables: - SERVER_TYPE: "XSOAR" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - GCS_MARKET_BUCKET_TO_UPLOAD: $GCS_MARKET_BUCKET - MARKETPLACE: "xsoar" - GCS_CURRENT_PRODUCTION_BUCKET: $GCS_PRODUCTION_BUCKET - extends: .force-pack-upload - rules: - - if: '$FORCE_BUCKET_UPLOAD == "true"' - -marketplace-v2-force-pack-upload: - needs: [ "mpv2-prepare-testing-bucket-upload-flow" ] - variables: - SERVER_TYPE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_SERVER_TYPE: 
"${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - GCS_MARKET_BUCKET_TO_UPLOAD: $GCS_MARKET_V2_BUCKET - MARKETPLACE: "marketplacev2" - GCS_CURRENT_PRODUCTION_BUCKET: $GCS_PRODUCTION_V2_BUCKET - extends: .force-pack-upload - rules: - - if: '$FORCE_BUCKET_UPLOAD == "true"' - -xpanse-force-pack-upload: - needs: [ "xpanse-prepare-testing-bucket-upload-flow" ] - variables: - SERVER_TYPE: "XPANSE" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XPANSE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XPANSE}/server_type_${SERVER_TYPE}" - GCS_MARKET_BUCKET_TO_UPLOAD: $GCS_MARKET_XPANSE_BUCKET - MARKETPLACE: "xpanse" - GCS_CURRENT_PRODUCTION_BUCKET: $GCS_PRODUCTION_XPANSE_BUCKET - extends: .force-pack-upload - rules: - - if: '$FORCE_BUCKET_UPLOAD == "true"' - -.force-pack-upload: - tags: - - gke - stage: upload-to-marketplace - extends: - - .default-job-settings - script: - - *check_user_permissions_to_upload_packs - - EXTRACT_FOLDER=$(mktemp -d) - - PACK_ARTIFACTS="${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs.zip" - - PACKS_DEPENDENCIES="${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" - - CI_COMMIT_BRANCH=${CI_COMMIT_BRANCH:-unknown} - - GCS_BUILD_BUCKET="marketplace-ci-build" - - | - if [[ $GCS_MARKET_BUCKET_TO_UPLOAD == $GCS_CURRENT_PRODUCTION_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="content" - fi - - if [[ $GCS_MARKET_BUCKET_TO_UPLOAD != $GCS_CURRENT_PRODUCTION_BUCKET ]] && [[ -z $STORAGE_BASE_PATH ]]; then - STORAGE_BASE_PATH="upload-flow/builds/$CI_COMMIT_BRANCH/$CI_PIPELINE_ID/content" - fi - - - python3 ./Tests/Marketplace/copy_and_upload_packs.py -a "${PACK_ARTIFACTS}" -e $EXTRACT_FOLDER -pb "$GCS_MARKET_BUCKET_TO_UPLOAD" -bb "$GCS_BUILD_BUCKET" -s $GCS_MARKET_KEY -n $CI_PIPELINE_ID -c $CI_COMMIT_BRANCH -p "${PACKS_TO_UPLOAD}" -pbp "$STORAGE_BASE_PATH/packs" --marketplace $MARKETPLACE - - -fan-in-bucket-upload: - tags: - - gke - stage: fan-in - extends: - - .bucket-upload-rule-always - script: - - echo "fan in" - - 
-slack-notify-bucket-upload: - extends: - - .trigger-slack-notification - - .bucket-upload-rule-always - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. - PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Upload Packs to Marketplace Storage' - JOB_NAME: 'fan-in-bucket-upload' - SLACK_CHANNEL: $SLACK_CHANNEL - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'false' - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME - -upload-id-set-bucket: - tags: - - gke - stage: prepare-testing-bucket - extends: - - .bucket-upload-rule - - .default-job-settings - variables: - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - INSTANCE_ROLE: "Server Master" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER}/server_type_${SERVER_TYPE}" - script: - # This is needed because we still use id_set.json in other repos - - | - if [[ $TEST_UPLOAD == "true" ]]; then - echo "Skipping uploading id-set to the bucket in test upload-flow" - job-done - exit 0 - fi - - - !reference [.create-id-set-xsoar] - - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - gsutil cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/id_set.json" "gs://$GCS_MARKET_BUCKET/content/id_set.json" - - job-done - -sync-buckets-between-projects: - # syncs buckets from oproxy-dev project to xdr-xsoar-content-dev-01 project - tags: - - gke - extends: - - .bucket-upload-rule - - .default-job-settings - stage: upload-to-marketplace - needs: ["upload-packs-to-marketplace", 
"upload-packs-to-marketplace-v2", "upload-packs-to-xpanse-marketplace", "upload-packs-to-xsoar-saas-marketplace"] - when: always - variables: - MARKETPLACE_XSOAR_PROD: "marketplace-xsoar" - MARKETPLACE_V2_PROD: "marketplace-xsiam" - MARKETPLACE_XPANSE_PROD: "marketplace-xpanse" - script: - - | - if [[ $TEST_UPLOAD == "true" ]]; then - echo "Skipping syncing buckets in test upload-flow" - exit 0 - fi - - - | - if [[ -z "$GCS_XSOAR_CONTENT_DEV_KEY" ]] || [[ -z "$GCS_XSOAR_CONTENT_PROD_KEY" ]]; then - echo "GCS_XSOAR_CONTENT_DEV_KEY or GCS_XSOAR_CONTENT_PROD_KEY not set, cannot perform sync" - job-done - exit 1 - else - gcloud auth activate-service-account --key-file="$GCS_XSOAR_CONTENT_DEV_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - echo "Syncing gs://marketplace-xsoar-dev" - gsutil -m rsync -r gs://marketplace-saas-dist gs://marketplace-xsoar-dev - echo "Syncing gs://marketplace-xsiam-dev" - gsutil -m rsync -r gs://marketplace-v2-dist gs://marketplace-xsiam-dev - echo "Syncing gs://marketplace-xpanse-dev" - gsutil -m rsync -r gs://xpanse-dist gs://marketplace-xpanse-dev - - ./Tests/scripts/validate_synced_buckets.sh "dev" - - gcloud auth activate-service-account --key-file="$GCS_XSOAR_CONTENT_PROD_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - echo "Syncing gs://marketplace-xsoar-prod-us" - gsutil -m rsync -r gs://marketplace-saas-dist gs://marketplace-xsoar-prod-us - echo "Syncing gs://marketplace-xsiam-prod-us" - gsutil -m rsync -r gs://marketplace-v2-dist gs://marketplace-xsiam-prod-us - echo "Syncing gs://marketplace-xpanse-prod-us" - gsutil -m rsync -r gs://xpanse-dist gs://marketplace-xpanse-prod-us - - ./Tests/scripts/validate_synced_buckets.sh "prod-us" - - echo "Bucket sync completed" - fi diff --git a/.gitlab/ci/.gitlab-ci.build-machines-cleanup.yml b/.gitlab/ci/.gitlab-ci.build-machines-cleanup.yml deleted file mode 100644 index 9ceb9a1117ff..000000000000 --- a/.gitlab/ci/.gitlab-ci.build-machines-cleanup.yml +++ /dev/null @@ 
-1,78 +0,0 @@ -.build-machines-cleanup-rule: - rules: - - if: '$BUILD_MACHINES_CLEANUP == "true"' - -.build-machines-cleanup-rule-always: - rules: - - if: '$BUILD_MACHINES_CLEANUP == "true"' - when: always - -stages: - - security - -build-machines-cleanup: - stage: cleanup - extends: - - .build-machines-cleanup-rule - - .default-job-settings - script: - - EXIT_CODE=0 - - !reference [.download-demisto-conf] - - !reference [.lock-machine] - - !reference [.uninstall-packs-and-reset-bucket-cloud] - - !reference [.cloud-machine-information] - - job-done - - exit "${EXIT_CODE}" - after_script: - - source .gitlab/helper_functions.sh - - !reference [ .unlock-machine ] - timeout: 4 hours - variables: - CLOUD_MACHINES_TYPE: "nightly" - CLOUD_MACHINES_COUNT: "all" - GCS_LOCKS_PATH: "content-locks/locks-xsiam-ga-nightly" - INSTANCE_ROLE: "XSIAM" - PRODUCT_TYPE: "XSIAM" - SERVER_TYPE: "XSIAM" - GCS_QUEUE_FILE: "queue-ga" - CLOUD_SERVERS_FILE: "xsiam_servers_path" - CLOUD_API_KEYS: $XSIAM_API_KEYS - CLOUD_API_TOKENS: $XSIAM_TOKENS - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_MPV2}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - GCS_MARKET_BUCKET: "${GCS_MARKET_V2_BUCKET}" - GCS_SOURCE_BUCKET: "$GCS_PRODUCTION_V2_BUCKET" - GCS_MACHINES_BUCKET: "marketplace-v2-dist-dev/upload-flow/builds-xsiam" - MARKETPLACE_NAME: "marketplacev2" - NON_REMOVABLE_PACKS: "Base" - RESET_CORE_PACK_VERSION: "false" - -fan-in-build-machines-cleanup: - tags: - - gke - stage: fan-in - extends: - - .build-machines-cleanup-rule-always - script: - - echo "fan in" - -slack-notify-build-machines-cleanup: - extends: - - .trigger-slack-notification - - .build-machines-cleanup-rule-always - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline 
with alternate env variable value passed in the API call. - PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Build Machines Cleanup' - JOB_NAME: 'fan-in-build-machines-cleanup' - SLACK_CHANNEL: $SLACK_CHANNEL - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'false' - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME diff --git a/.gitlab/ci/.gitlab-ci.global.yml b/.gitlab/ci/.gitlab-ci.global.yml deleted file mode 100644 index edd2a455a51a..000000000000 --- a/.gitlab/ci/.gitlab-ci.global.yml +++ /dev/null @@ -1,574 +0,0 @@ -.default-cache: - cache: - key: - files: - - "poetry.lock" - - ".pre-commit-config_template.yaml" - prefix: dev-content - paths: - - $PIP_CACHE_DIR - - $PRE_COMMIT_HOME - - .venv/ - - node_modules/ - - .npm/ - policy: pull - - -.setup-network-certs: &setup-network-certs - - section_start "Setup network certs" --collapsed - - chmod 700 ${CERTIFICATE_SETUP_SCRIPT} - - source ${CERTIFICATE_SETUP_SCRIPT} - - section_end "Setup network certs" - -.setup-artifactory: &setup-artifactory - - section_start "Setup Artifactory" --collapsed - - chmod 700 ${ARTIFACTORY_SETUP_SCRIPT} - - source ${ARTIFACTORY_SETUP_SCRIPT} - - section_end "Setup Artifactory" - -### Global Script Snippets ### - -.create-id-set: - - section_start "Create ID Set" --collapsed - - demisto-sdk create-id-set -o ./Tests/id_set.json >> "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs/create_id_set.log" - - cp ./Tests/id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}" - - section_end "Create ID Set" - -.create-id-set-xsoar: - - section_start "Create ID Set" --collapsed - - demisto-sdk create-id-set -o ./Tests/id_set.json --marketplace "xsoar" >> "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs/create_id_set.log" - - cp ./Tests/id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}" - - if [ -f 
./all_removed_items_from_id_set.json ]; then cp ./all_removed_items_from_id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs"; fi - - if [ -f ./items_removed_manually_from_id_set.json ]; then cp ./items_removed_manually_from_id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs"; fi - - section_end "Create ID Set" - -.create-id-set-mp-v2: - - section_start "Create ID Set" --collapsed - - demisto-sdk create-id-set -o ./Tests/id_set.json --marketplace "marketplacev2" >> "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs/create_id_set.log" - - cp ./Tests/id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}" - - if [ -f ./all_removed_items_from_id_set.json ]; then cp ./all_removed_items_from_id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs"; fi - - if [ -f ./items_removed_manually_from_id_set.json ]; then cp ./items_removed_manually_from_id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs"; fi - - section_end "Create ID Set" - -.create-id-set-xpanse: - - section_start "Create ID Set" --collapsed - - demisto-sdk create-id-set -o ./Tests/id_set.json --marketplace "xpanse" >> "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs/create_id_set.log" - - cp ./Tests/id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}" - - if [ -f ./all_removed_items_from_id_set.json ]; then cp ./all_removed_items_from_id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs"; fi - - if [ -f ./items_removed_manually_from_id_set.json ]; then cp ./items_removed_manually_from_id_set.json "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs"; fi - - section_end "Create ID Set" - -.download-demisto-conf: - - section_start "Download content-test-conf and infra" --collapsed - - ./Tests/scripts/download_conf_repos.sh 2>&1 | tee --append "${ARTIFACTS_FOLDER}/logs/download_conf_repos.log" - - section_end "Download content-test-conf and infra" - -.secrets-fetch: - - section_start "Secrets Fetch" --collapsed - - SECRET_CONF_PATH=$(cat secret_conf_path) - - python3 ./Tests/scripts/add_secrets_file_to_build.py -sa "$GSM_SERVICE_ACCOUNT" -sf "$SECRET_CONF_PATH" -u 
"$DEMISTO_USERNAME" -p "$DEMISTO_PASSWORD" --gsm_project_id_dev "$GSM_PROJECT_ID_DEV" --gsm_project_id_prod "$GSM_PROJECT_ID" >> "${ARTIFACTS_FOLDER}/logs/handle_secrets.log" - - section_end "Secrets Fetch" - -.check_build_files_are_up_to_date: &check_build_files_are_up_to_date - - section_start "Check Build Files Are Up To Date" - - | - if [[ -n "${DEMISTO_SDK_NIGHTLY}" ]] || [[ -n "${NIGHTLY}" ]] || [[ -n "${BUCKET_UPLOAD}" ]] || [[ -n "${SLACK_JOB}" ]] || [[ "${BUILD_MACHINES_CLEANUP}" == "true" ]] || [[ "${DELETE_MISMATCHED_BRANCHES}" == "true" ]] || [[ "${SECURITY_SCANS}" == "true" ]] || [[ "${DEMISTO_TEST_NATIVE_CANDIDATE}" == "true" ]] || [[ "${CI_COMMIT_BRANCH}" == "master" ]] || [[ "${SDK_RELEASE}" == "true" ]] || [[ "${TRIGGER_CONTRIBUTION_BUILD}" == "true" ]]; then - echo "Running a build which doesn't require build files check validation" - else - ./Tests/scripts/is_file_up_to_date.sh .gitlab $CI_COMMIT_BRANCH - # we want to checkout if it's not up-to-date - ./Tests/scripts/is_file_up_to_date.sh poetry.lock $CI_COMMIT_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh pyproject.toml $CI_COMMIT_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh Tests/Marketplace/core_packs_list.json $CI_COMMIT_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh Tests/Marketplace/core_packs_mpv2_list.json $CI_COMMIT_BRANCH true - ./Tests/scripts/is_file_up_to_date.sh Tests/Marketplace/core_packs_xpanse_list.json $CI_COMMIT_BRANCH true - fi - - section_end "Check Build Files Are Up To Date" - -.stop_contrib_external_build: &stop_contrib_external_build - - - section_start "Stop contrib external build" - - | - if [[ $CI_COMMIT_BRANCH =~ ^contrib/* && $CI_PIPELINE_SOURCE = "push" && $(curl --get --header "Accept: application/vnd.github.v3.raw" --header "Authorization: token $GITHUB_TOKEN" "https://api.github.com/repos/demisto/content/pulls?state=open&base=master" --data-urlencode "head=demisto:${CI_COMMIT_BRANCH}" | jq) = '[]' ]]; then - echo "not running on contrib/ 
branches when there is no open internal PR or the pipeline was manually triggered" - set -e - exit 1 - fi - - section_end "Stop contrib external build" - -.create_artifacts_and_server_type_instance_folders: &create_artifacts_and_server_type_instance_folders - - section_start "Create Artifacts, Server Instance, Server Type folders" --collapsed - - | - if [[ -n "${ARTIFACTS_FOLDER}" ]] && [[ ! -d "${ARTIFACTS_FOLDER}/logs" ]]; then - echo "Creating Artifacts folder: ${ARTIFACTS_FOLDER} and it's log folder" - mkdir -p -m 777 "${ARTIFACTS_FOLDER}/logs" # using the -p to create the logs folder as well. - fi - - | - if [[ -n "${ARTIFACTS_FOLDER_INSTANCE}" ]] && [[ ! -d "${ARTIFACTS_FOLDER_INSTANCE}/logs" ]]; then - echo "Creating Artifacts instance folder: ${ARTIFACTS_FOLDER_INSTANCE} and it's log folder" - mkdir -p -m 777 "${ARTIFACTS_FOLDER_INSTANCE}/logs" # using the -p to create the logs folder as well. - echo "${INSTANCE_ROLE}" > "${ARTIFACTS_FOLDER_INSTANCE}/instance_role.txt" - fi - - | - if [[ -n "${ARTIFACTS_FOLDER_SERVER_TYPE}" ]] && [[ ! -d "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs" ]]; then - echo "Creating Artifacts Server type folder: ${ARTIFACTS_FOLDER_SERVER_TYPE} and it's log folder" - mkdir -p -m 777 "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs" # using the -p to create the logs folder as well. 
- echo "${SERVER_TYPE}" > "${ARTIFACTS_FOLDER_SERVER_TYPE}/server_type.txt" - fi - - section_end "Create Artifacts, Server Instance, Server Type folders" - -.clone_and_export_variables: &clone_and_export_variables - - section_start "Git - Job Start Actions" --collapsed - - git fetch origin master:refs/remotes/origin/master - - git checkout -B $CI_COMMIT_BRANCH $CI_COMMIT_SHA - - git config diff.renameLimit 6000 - - section_end "Git - Job Start Actions" - - section_start "Source BASH Environment" --collapsed - - | - if [[ -f "$BASH_ENV" ]]; then - source "$BASH_ENV" - fi - - source .circleci/content_release_vars.sh - # DEMISTO_SDK_GRAPH_FORCE_CREATE set to true to create graph from scratch. - - | - if [[ $NIGHTLY ]]; then - echo "set DEMISTO_SDK_GRAPH_FORCE_CREATE to true to create graph from scratch" - export DEMISTO_SDK_GRAPH_FORCE_CREATE=true - echo "DEMISTO_SDK_GRAPH_FORCE_CREATE was set to true to create graph from scratch" - echo $DEMISTO_SDK_GRAPH_FORCE_CREATE - fi - - section_end "Source BASH Environment" - - section_start "Granting execute permissions on files" --collapsed - - chmod +x ./Tests/scripts/* - - chmod +x ./Tests/Marketplace/* - - section_end "Granting execute permissions on files" - -.get_contribution_pack: &get_contribution_pack - - section_start "getting contrib packs" --collapsed - - | - if [[ -n "${CONTRIB_BRANCH}" ]]; then - USERNAME=$(echo $CONTRIB_BRANCH | cut -d ":" -f 1) - BRANCH=$(echo $CONTRIB_BRANCH | cut -d ":" -f 2) - python3 ./Utils/update_contribution_pack_in_base_branch.py -p $PULL_REQUEST_NUMBER -b $BRANCH -u $USERNAME -c $CONTRIB_REPO -gt $GITHUB_TOKEN - fi - - section_end "getting contrib packs" - -.install_venv: &install_venv - - section_start "Installing Virtualenv" --collapsed - # we still need to install even if cached. 
if cached, `poetry` will handle it - - echo "installing venv, with always copy:${POETRY_VIRTUALENVS_OPTIONS_ALWAYS_COPY}" - - NO_HOOKS=1 .hooks/bootstrap | tee --append "${ARTIFACTS_FOLDER}/logs/installations.log" - - echo "Checking if pyproject.toml is consistent with poetry.lock" - - poetry lock --check - - npm ci --cache .npm --prefer-offline | tee --append "${ARTIFACTS_FOLDER}/logs/installations.log" - - source ./.venv/bin/activate - - | - if [[ -n "${DEMISTO_SDK_NIGHTLY}" || -n "${OVERRIDE_SDK_REF}" ]]; then - echo "Installing SDK from ${SDK_REF}" | tee --append "${ARTIFACTS_FOLDER}/logs/installations.log" - pip3 uninstall -y demisto-sdk | tee --append "${ARTIFACTS_FOLDER}/logs/installations.log" - pip3 install "git+https://github.com/demisto/demisto-sdk@${SDK_REF}#egg=demisto-sdk" | tee --append "${ARTIFACTS_FOLDER}/logs/installations.log" - else - echo "Using SDK from pyproject.toml" | tee --append "${ARTIFACTS_FOLDER}/logs/installations.log" - fi - - | - python3 --version | tee -a "${ARTIFACTS_FOLDER}/logs/installed_python_libraries.log" - python3 -m pip list | tee -a "${ARTIFACTS_FOLDER}/logs/installed_python_libraries.log" - - section_end "Installing Virtualenv" - -.ssh-config-setup: - - section_start "SSH config setup" --collapsed - - cp $GCP_SSH_CONFIGURATION ~/.ssh/config - - chmod 700 ~/.ssh/config - - section_end "SSH config setup" - -.install_ssh_keys: &install_ssh_keys - - section_start "Installing SSH keys" --collapsed - - eval $(ssh-agent -s) - - chmod 400 $OREGON_CI_KEY - - ssh-add $OREGON_CI_KEY - - mkdir -p ~/.ssh - - chmod 700 ~/.ssh - - section_end "Installing SSH keys" - -.install_node_modules: &install_node_modules - - section_start "Installing node modules" --collapsed - - source $NVM_DIR/nvm.sh - - nvm use default | tee --append "${ARTIFACTS_FOLDER}/logs/installations_node.log" - - echo "Installing Node Modules" | tee --append "${ARTIFACTS_FOLDER}/logs/installations_node.log" - - npm ci --cache .npm --prefer-offline >> 
"${ARTIFACTS_FOLDER}/logs/installations_node.log" 2>&1 - - npm list --json >> "${ARTIFACTS_FOLDER}/logs/installations_node.log" 2>&1 - - npm link jsdoc-to-markdown@5.0.3 | tee >> "${ARTIFACTS_FOLDER}/logs/installations_node.log" 2>&1 # disable-secrets-detection - - section_end "Installing node modules" - -.get_last_upload_commit: &get_last_upload_commit - - section_start "Getting last bucket upload commit" --collapsed - - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - gsutil cp "gs://$GCS_PRODUCTION_BUCKET/content/packs/index.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/previous_index.json" - - export LAST_UPLOAD_COMMIT=$(cat "${ARTIFACTS_FOLDER_SERVER_TYPE}/previous_index.json" | jq -r ".\"commit\"") - - section_end "Getting last bucket upload commit" - -.neo4j-setup: &neo4j-setup - - section_start "Neo4j Setup" --collapsed - - neo4j-admin dbms set-initial-password contentgraph - - neo4j start - - section_end "Neo4j Setup" - -.build_parameters: &build_parameters - - section_start "Build Parameters" --collapsed - - echo "Environment Variables:" - - set | grep -E "^ARTIFACTS_FOLDER.*=|^JIRA_.*=|^NIGHTLY=|^INSTANCE_TESTS=|^SERVER_BRANCH_NAME=|^ARTIFACT_BUILD_NUM=|^DEMISTO_SDK_NIGHTLY=|^TIME_TO_LIVE=|^CONTRIB_BRANCH=|^FORCE_PACK_UPLOAD=|^PACKS_TO_UPLOAD=|^BUCKET_UPLOAD=|^STORAGE_BASE_PATH=|^OVERRIDE_ALL_PACKS=|^GCS_MARKET_BUCKET=|^GCS_MARKET_V2_BUCKET=|^GCS_MARKET_XPANSE_BUCKET=|^SLACK_.*=|^NVM_DIR=|^NODE_VERSION=|^PATH=|^ARTIFACTS_FOLDER=|^ARTIFACTS_FOLDER_INSTANCE=|^ARTIFACTS_FOLDER_SERVER_TYPE=|^ENV_RESULTS_PATH=|^LAST_UPLOAD_COMMIT=|^DEMISTO_SDK_LOG_FILE_SIZE=|^DEMISTO_SDK_LOG_FILE_COUNT=|^DEMISTO_SDK_LOG_FILE_PATH=|^DEMISTO_SDK_LOG_NO_COLORS=|^DEMISTO_SDK_LOG_NOTIFY_PATH=|^POETRY_VIRTUALENVS_OPTIONS_ALWAYS_COPY=|^DEMISTO_SDK_NIGHTLY=|^OVERRIDE_SDK_REF=|^SDK_REF=" | sort - - echo "Versions Installed:" - - python --version - - python3 --version - - poetry --version - - pip3 --version - - node 
--version - - npm --version - - jsdoc2md --version - - demisto-sdk --version - - section_end "Build Parameters" - -.gitlab_ci_build_parameters: &gitlab_ci_build_parameters - - section_start "Gitlab CI Build Parameters" --collapsed - - set | grep -E "^CI_.*=|^GITLAB.*=" | sort - - section_end "Gitlab CI Build Parameters" - -.checkout-upload-commit-content-nightly: &checkout-upload-commit-content-nightly - - section_start "Checkout upload commit content nightly" --collapsed - - | - if [[ -n "${NIGHTLY}" && "${CI_COMMIT_BRANCH}" == "master" ]]; then - if [[ ! -d "${CI_PROJECT_DIR}/artifacts/production_packs" ]]; then - echo "content production packs do not exist in ${CI_PROJECT_DIR}/artifacts" - exit 1 - fi - rm -rf ./Packs - echo "copying production Packs folder from ${CI_PROJECT_DIR}/artifacts/production_packs to ./Packs" - cp -r ${CI_PROJECT_DIR}/artifacts/production_packs ./Packs - git config core.fileMode false # used to tell git not to identify permission changes on files as changes - chmod -R 777 ./Packs # required for the lint, we use the permissions of the file when running pytest within the containers - echo "the Packs changes between upload commit ${LAST_UPLOAD_COMMIT} to master commit ${CI_COMMIT_SHA} is:" - git status -- Packs # show the differences between the upload commit to the master branch for the Packs folder - echo "The Packs folder is in the state of commit $LAST_UPLOAD_COMMIT" - else - echo "not checking out to the latest upload commit $LAST_UPLOAD_COMMIT because the build is not content nightly" - fi - - | - - section_end "Checkout upload commit content nightly" - -.export_cloud_machine_constants: &export_cloud_machine_constants - # exporting the machine credentials - - CLOUD_SERVERS_PATH=$(cat $CLOUD_SERVERS_FILE) - - cat "${CLOUD_API_KEYS}" > "cloud_api_keys.json" - - cat "${CLOUD_API_TOKENS}" > "cloud_api_tokens.json" - - - IFS=', ' read -r -a CLOUD_CHOSEN_MACHINE_ID_ARRAY <<< "${CLOUD_CHOSEN_MACHINE_IDS}" - - | - for CLOUD_CHOSEN_MACHINE_ID 
in "${CLOUD_CHOSEN_MACHINE_ID_ARRAY[@]}"; do - export XSIAM_SERVER_CONFIG=$(jq -r ".[\"${CLOUD_CHOSEN_MACHINE_ID}\"]" < "$CLOUD_SERVERS_PATH") - export DEMISTO_BASE_URL=$(echo "$XSIAM_SERVER_CONFIG" | jq -r ".[\"base_url\"]") - export XSIAM_AUTH_ID=$(echo "$XSIAM_SERVER_CONFIG" | jq -r ".[\"x-xdr-auth-id\"]") - export DEMISTO_API_KEY=$(jq -r ".[\"${CLOUD_CHOSEN_MACHINE_ID}\"]" < "cloud_api_keys.json") - export XSIAM_TOKEN=$(jq -r ".[\"${CLOUD_CHOSEN_MACHINE_ID}\"]" < "cloud_api_tokens.json") - break - done - -.default-before-script: - before_script: - - source .gitlab/helper_functions.sh - - *setup-network-certs - - *setup-artifactory - - *stop_contrib_external_build - - *create_artifacts_and_server_type_instance_folders - - *clone_and_export_variables - - *check_build_files_are_up_to_date - - *install_node_modules - - *install_venv - - *get_contribution_pack - - *get_last_upload_commit - - *install_ssh_keys - - *neo4j-setup - - *build_parameters - - *gitlab_ci_build_parameters - - *checkout-upload-commit-content-nightly -.default-after-script: - - source .gitlab/helper_functions.sh - - *setup-network-certs - - *setup-artifactory - - *install_node_modules - - *install_venv - -.add-content-production-to-artifacts: - - section_start "Clone production content and add it to artifacts" --collapsed - - mkdir content_production - - cd content_production - - git init > /dev/null 2>&1 - - git remote add origin https://gitlab-ci-token:${CI_JOB_TOKEN}@${CI_SERVER_HOST}/${CI_PROJECT_NAMESPACE}/content.git - - git fetch --depth 1 origin $LAST_UPLOAD_COMMIT - - git checkout FETCH_HEAD >${ARTIFACTS_FOLDER}/logs/add-content-production-to-artifacts.log 2>&1 - - cp -r ./Packs ${ARTIFACTS_FOLDER}/production_packs - - echo "checked out ${LAST_UPLOAD_COMMIT} which is the last successful upload commit" - - section_end "Clone production content and add it to artifacts" - - job-done - - -.default-job-settings: - interruptible: true - extends: - - .default-cache - - .default-before-script 
- needs: - - job: cloning-content-repo-last-upload-commit - optional: true - -.trigger-slack-notification: - stage: .post - trigger: - include: - - local: .gitlab/ci/.gitlab-ci.slack-notify.yml - inherit: # see https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27775 - variables: false - -.destroy_xsoar_instances: - - section_start "Destroy Instances" - - python3 ./Tests/scripts/destroy_instances.py --artifacts-dir "${ARTIFACTS_FOLDER}" --env-file "${ENV_RESULTS_PATH}" --instance-role "${INSTANCE_ROLE}" - - destroy_instances_exit_code=$? - - | - if [ "${destroy_instances_exit_code}" -ne 0 ]; then - echo "Failed to destroy instances of role ${INSTANCE_ROLE}, exit code: ${destroy_instances_exit_code}" - fi - - section_end "Destroy Instances" - -.lock-machine: - - section_start "Lock Machine" --collapsed - - echo "Authenticating GCP" - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - echo "Auth done successfully" - - ./Tests/scripts/wait_in_line_for_cloud_env.sh "$CLOUD_MACHINES_TYPE" - - source CloudEnvVariables - - echo "CLOUD Chosen machine ids are:${CLOUD_CHOSEN_MACHINE_IDS}" | tee "${ARTIFACTS_FOLDER}/logs/lock_file.txt" - - section_end "Lock Machine" - -.unlock-machine: - - section_start "Unlock Machine" --collapsed - - | - if [[ -f "CloudEnvVariables" ]]; then - source CloudEnvVariables - - if [[ -n "${CLOUD_CHOSEN_MACHINE_IDS}" ]]; then - if [[ -e "${ARTIFACTS_FOLDER}/logs/lock_file.txt" ]]; then - echo "Job finished, removing lock file for machine ids:${CLOUD_CHOSEN_MACHINE_IDS}" - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - gsutil rm "gs://xsoar-ci-artifacts/$GCS_LOCKS_PATH/machines_locks/*-lock-$CI_JOB_ID" - echo "Finished removing lock file(s)" - else - echo "No lock file found, skipping unlocking" - fi - else - echo "No machine ids were chosen, skipping unlocking" - fi - else - echo "No 
CloudEnvVariables file found, skipping unlocking" - fi - - section_end "Unlock Machine" - -.cloud-machine-information: - - section_start "Cloud Machine information" - - ./Tests/scripts/print_cloud_machine_details.sh - - section_end "Cloud Machine information" - -.uninstall-packs-and-reset-bucket-cloud: - - section_start "Uninstall Packs and Reset Bucket Cloud" --collapsed - - ./Tests/scripts/uninstall_packs_and_reset_bucket_cloud.sh || EXIT_CODE=$? - - section_end "Uninstall Packs and Reset Bucket Cloud" - - -.validate_content_test_conf_branch_merged: - - section_start "Validate content-test-conf Branch Merged" - - | - if [[ "${CI_COMMIT_BRANCH}" = "master" ]]; then - echo "Skipping, Should not run on master branch." - elif [ 'true' = $(./Tests/scripts/check_if_branch_exist.sh -u "gitlab-ci-token" -t "${CI_JOB_TOKEN}" -h "${CI_SERVER_HOST}" --repo "${CI_PROJECT_NAMESPACE}/content-test-conf" -b "${CI_COMMIT_BRANCH}") ]; then - RED='\033[0;31m' - NC='\033[0m' - echo -e "${RED}ERROR: Found a branch with the same name:${CI_COMMIT_BRANCH} in contest-test-conf repository.\n Merge it in order to merge the current branch into content repo.${NC}" - job-done - exit 1 - else - echo "Couldn't find a branch with the name:${CI_COMMIT_BRANCH} in contest-test-conf repository." 
- fi - - section_end "Validate content-test-conf Branch Merged" - -.pre-commit-settings: - tags: - - gce - stage: unittests-and-validations - artifacts: - reports: - coverage_report: - coverage_format: cobertura - path: ${CI_PROJECT_DIR}/artifacts/coverage_report/coverage.xml - expire_in: 30 days - paths: - - ${CI_PROJECT_DIR}/unit-tests - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - services: - - name: ${DOCKER_IO}/library/docker:20.10.12-dind - alias: docker - variables: - DOCKER_HOST: tcp://docker:2375 - DOCKER_DRIVER: overlay2 - DOCKER_TLS_CERTDIR: "" - extends: - - .default-job-settings - - -.run-pre-commit: - cache: - policy: pull-push - extends: - - .pre-commit-settings - script: - - section_start "Test Infrastructure" - - python3 -m pytest ./Tests/scripts/infrastructure_tests/ -v --disable-warnings - - python3 -m pytest ./Tests/Marketplace/Tests/ -v --disable-warnings - - python3 -m pytest ./Tests/tests -v --disable-warnings - - python3 -m pytest ./Tests/private_build/ -v --disable-warnings - - python3 -m pytest Utils -v --disable-warnings - - | - if [ -n "${DEMISTO_SDK_NIGHTLY}" ]; then - ./Tests/scripts/sdk_pylint_check.sh - fi - - section_end "Test Infrastructure" - - - section_start "Revoking GCP Auth and Configure Docker" - # we need to configure the docker with the registry in order to be able to pull the images - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - gcloud auth configure-docker ${DOCKER_IO_DOMAIN} >> "${ARTIFACTS_FOLDER}/logs/configure_docker_with_registry.log" 2>&1 - - section_end "Revoking GCP Auth and Configure Docker" - # temp solution not to spam the build - - SHOULD_LINT_ALL=$(./Tests/scripts/should_lint_all.sh) - - PRE_COMMIT_SUCCESS=0 - - | - if [[ -n $BUCKET_UPLOAD && $TEST_UPLOAD == "true" ]]; then - echo "Skipping validations when uploading to a test bucket." 
- else - if [[ -n $BUCKET_UPLOAD ]]; then - demisto-sdk pre-commit -g --prev-version $LAST_UPLOAD_COMMIT --mode=nightly || PRE_COMMIT_SUCCESS=1 - else - if [[ -n "${SHOULD_LINT_ALL}" ]]; then - echo "Pre Commit all files" - # if we need to pre-commit all anyway we need the graph, and it's better (resource-wise) to create it here. - demisto-sdk graph update - unset DEMISTO_SDK_GRAPH_FORCE_CREATE # The graph is already up, no need to force create it - demisto-sdk pre-commit -a --mode=nightly || PRE_COMMIT_SUCCESS=1 - else - echo "Pre Commit only changed files" - demisto-sdk pre-commit --mode=ci || PRE_COMMIT_SUCCESS=1 - fi - fi - fi - - | - if [[ -d coverage_report ]]; then - cp -r coverage_report artifacts/coverage_report - fi - - | - if [[ -n "${NIGHTLY}" && "$CI_COMMIT_BRANCH" == "master" ]]; then - python3 Utils/upload_code_coverage_report.py --service_account $GCS_MARKET_KEY --source_file_name "${ARTIFACTS_FOLDER}/coverage_report/coverage.json" --minimal_file_name "${ARTIFACTS_FOLDER}/coverage_report/coverage-min.json" - fi - - - echo "PRE_COMMIT_SUCCESS=$PRE_COMMIT_SUCCESS" - - job-done - - exit $PRE_COMMIT_SUCCESS - -.run-validations: - stage: unittests-and-validations - extends: - - .default-job-settings - artifacts: - expire_in: 30 days - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - script: - - section_start "Look For Secrets" - - demisto-sdk secrets --post-commit --ignore-entropy - - section_end "Look For Secrets" - - section_start "Copy conf.json To Server Type Artifacts Folder" - - cp "./Tests/conf.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" - - section_end "Copy conf.json To Server Type Artifacts Folder" - - section_start "Validate Files and Yaml" - - | - ./Tests/scripts/linters_runner.sh - ./Tests/scripts/validate.sh - - section_end "Validate Files and Yaml" - - section_start "Check Spelling" - - python3 ./Tests/scripts/circleci_spell_checker.py $CI_COMMIT_BRANCH - - section_end "Check 
Spelling" - - section_start "Validate landingPageSections.json" - - echo "Download index.zip" - - INDEX_PATH=$(mktemp) - - | - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - echo "successfully activated google cloud service account" - gsutil cp "gs://marketplace-dist/content/packs/index.zip" $INDEX_PATH - echo "successfully downloaded index.zip" - - echo "successfully downloaded index.zip into $INDEX_PATH" - - - UNZIP_PATH=$(mktemp -d) - - unzip $INDEX_PATH -d $UNZIP_PATH > "${ARTIFACTS_FOLDER}/logs/unzip_index.log" - - - python3 Tests/Marketplace/validate_landing_page_sections.py -i $UNZIP_PATH - - section_end "Validate landingPageSections.json" - - section_start "Revoking GCP Auth" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - section_end "Revoking GCP Auth" - - !reference [ .validate_content_test_conf_branch_merged ] # This section should be the last one in the script, do not move it. - - job-done - -.run-validations-new-validate-flow: - stage: unittests-and-validations - extends: - - .default-job-settings - artifacts: - expire_in: 30 days - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - script: - - section_start "Validate Files and Yaml" - - | - ./Tests/scripts/linters_runner.sh - ./Tests/scripts/new_validate.sh - - section_end "Validate Files and Yaml" - - !reference [ .validate_content_test_conf_branch_merged ] # This section should be the last one in the script, do not move it. 
- - job-done - -.jobs-done-check: - stage: are-jobs-really-done - extends: - - .default-job-settings - script: - - python3 Tests/scripts/check_jobs_done.py --triggering-workflow "${WORKFLOW}" --job-done-files "${PIPELINE_JOBS_FOLDER}" - -cloning-content-repo-last-upload-commit: - stage: .pre - rules: - - if: '$NIGHTLY' - artifacts: - expire_in: 30 days - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - before_script: - - source .gitlab/helper_functions.sh - - *setup-network-certs - - *create_artifacts_and_server_type_instance_folders - - *get_last_upload_commit - variables: - ARTIFACTS_FOLDER: ${CI_PROJECT_DIR}/artifacts - script: - - !reference [.add-content-production-to-artifacts] diff --git a/.gitlab/ci/.gitlab-ci.miscellaneous.yml b/.gitlab/ci/.gitlab-ci.miscellaneous.yml deleted file mode 100644 index 43855f876b84..000000000000 --- a/.gitlab/ci/.gitlab-ci.miscellaneous.yml +++ /dev/null @@ -1,13 +0,0 @@ -delete-mismatched-branches: - tags: - - gke - stage: fan-in - extends: - - .default-job-settings - rules: - - if: '$DELETE_MISMATCHED_BRANCHES == "true"' - when: always - script: - - python3 Utils/delete_mismatched_branches.py - retry: - max: 2 diff --git a/.gitlab/ci/.gitlab-ci.on-push.yml b/.gitlab/ci/.gitlab-ci.on-push.yml deleted file mode 100644 index c297f1f579cf..000000000000 --- a/.gitlab/ci/.gitlab-ci.on-push.yml +++ /dev/null @@ -1,930 +0,0 @@ -# This rule is to not run the build for docker update branches (for non-nightly packs) -.filter-non-nightly-docker-updates-rule: - rules: - - if: '$CI_COMMIT_BRANCH =~ /^demisto\// && $CI_COMMIT_BRANCH !~ /^demisto\/.*-nightly$/' - when: never - -.push-rule: - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - -trigger-private-build: - tags: - - gke - stage: unittests-and-validations - extends: - - .default-job-settings - rules: - - !reference 
[.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_COMMIT_BRANCH =~ /pull\/[0-9]+/' - when: never - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - if: '$NIGHTLY' - script: - - echo "====Trigger Private Build====" - - | - if [ 'true' = $(./Tests/scripts/check_if_branch_exist.sh -t "${GITHUB_TOKEN}" --repo "demisto/content-private" -b "${CI_COMMIT_BRANCH}") ]; then - PRIVATE_BRANCH_NAME=$CI_COMMIT_BRANCH - else - PRIVATE_BRANCH_NAME='master' - fi - - python3 Utils/trigger_private_build.py --github-token $GITHUB_TOKEN --private-branch-name $PRIVATE_BRANCH_NAME - - sleep 60 - - python3 Utils/get_private_build_status.py --github-token $GITHUB_TOKEN - - job-done - timeout: 2 hours - -.create-release-notes-and-common-docs: - - section_start "Create Release Notes and Common Server Documentation" --collapsed - - echo "Creating Release Notes and Content Descriptor" - - ./Documentation/commonServerDocs.sh - - section_end "Create Release Notes and Common Server Documentation" - -merge-dev-secrets: - tags: - - gke - extends: - - .default-job-settings - variables: - master_branch_name: master - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH == $master_branch_name' - stage: unittests-and-validations - script: - - EXIT_CODE=0 - - !reference [.download-demisto-conf] - - section_start "Merging and deleting dev secrets" - - python3 ./Tests/scripts/merge_and_delete_dev_secrets.py -sa "$GSM_SERVICE_ACCOUNT" --gsm_project_id_dev "$GSM_PROJECT_ID_DEV" --gsm_project_id_prod "$GSM_PROJECT_ID" >> "${ARTIFACTS_FOLDER}/logs/merge_secrets.log" 2>&1 || EXIT_CODE=$? 
- - job-done - - exit $EXIT_CODE - - section_end "Merging and deleting dev secrets" - allow_failure: true - -stop-running-pipelines: - tags: - - gke - stage: unittests-and-validations - extends: - - .default-job-settings - variables: - master_branch_name: master - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH != $master_branch_name' - script: - - section_start "Stop running pipelines on current branch" - - python3 ./Tests/scripts/stop_running_pipeline.py --creds "${CONTENT_BUILD_GCP}" --zone "${GCP_ZONE}" - - section_end "Stop running pipelines on current branch" - - job-done - -run-pre-commit: - extends: - - .run-pre-commit - cache: - policy: pull-push - rules: - - if: '$BUCKET_UPLOAD == "true"' - when: never - - if: '$SECURITY_SCANS == "true"' - when: never - - if: '$BUILD_MACHINES_CLEANUP == "true"' - when: never - - if: '$FORCE_BUCKET_UPLOAD == "true"' - when: never - - if: '$DEMISTO_TEST_NATIVE_CANDIDATE == "true"' - when: never - - if: '$TRIGGER_CONTRIBUTION_BUILD == "true"' - when: never - - if: '$SDK_RELEASE == "true"' - when: never - # In sdk nighly we run this separately - - if: '$DEMISTO_SDK_NIGHTLY != "true"' - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - -# runs in gitlab for the on-push flow (except for contributors) -run-validations: - extends: - - .run-validations - rules: - - if: '$NIGHTLY' - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - -# runs in gitlab for the on-push flow (except for contributors) -run-validations-new-validate-flow: - extends: - - .run-validations-new-validate-flow - rules: - - if: '$NIGHTLY' - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - -# runs in gitlab for the on-push flow, on every new commit pushed to the branch. 
-validate-content-conf: - tags: - - gke - stage: unittests-and-validations - extends: - - .default-job-settings - rules: - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - script: - - !reference [ .validate_content_test_conf_branch_merged ] - - job-done - -.generic-prepare-testing-bucket: - tags: - - gke - extends: - - .default-job-settings - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: "$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/" - - if: "$NIGHTLY" - cache: - policy: pull-push - variables: - EXTRACT_PRIVATE_TESTDATA: "true" - stage: prepare-testing-bucket - script: - - !reference [.download-demisto-conf] - - !reference [.create-release-notes-and-common-docs] - - !reference [.secrets-fetch] - - section_start "Create or update content graph" --collapsed - - | - echo "set DEMISTO_SDK_GRAPH_FORCE_CREATE to true to create graph from scratch" - export DEMISTO_SDK_GRAPH_FORCE_CREATE=true - echo "DEMISTO_SDK_GRAPH_FORCE_CREATE was set to true to create graph from scratch" - echo $DEMISTO_SDK_GRAPH_FORCE_CREATE - - echo "Staging the repo to include the private packs in the graph" - - git add Packs - - echo "Updating the content graph" - - mkdir "${ARTIFACTS_FOLDER_SERVER_TYPE}/content_graph" - - demisto-sdk update-content-graph -g --marketplace "${MARKETPLACE_VERSION}" -o "${ARTIFACTS_FOLDER_SERVER_TYPE}/content_graph" - - echo "Successfully updated content graph" - - - section_end "Create or update content graph" - - - section_start "Create Content Artifacts and Update Conf" --collapsed - - export DEMISTO_SDK_MARKETPLACE=$MARKETPLACE_VERSION # This is done because the demisto-sdk uses this environment variable. - - | - if [[ $MARKETPLACE_VERSION == "xsoar" || $MARKETPLACE_VERSION == "xsoar_saas" ]]; # later the non xsoar will be edited to remove xsoar naming. - then - echo "Starting to create artifacts with zip for XSOAR." 
- python Tests/scripts/create_artifacts_graph/create_artifacts.py --marketplace "$MARKETPLACE_VERSION" --artifacts-output "${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs" --dependencies-output "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" --packs-output "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs.json" --bucket-upload "$BUCKET_UPLOAD" - else - echo "Starting to create artifacts without zip." - python Tests/scripts/create_artifacts_graph/create_artifacts.py --marketplace "$MARKETPLACE_VERSION" --artifacts-output "${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs" --dependencies-output "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" --packs-output "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs.json" --bucket-upload "$BUCKET_UPLOAD" --no-zip - fi - - - gcloud auth activate-service-account --key-file="$GCS_ARTIFACTS_KEY" >> "${ARTIFACTS_FOLDER_SERVER_TYPE}/logs/gcloud_auth.log" 2>&1 - - section_end "Create Content Artifacts and Update Conf" - - - section_start "Copy conf.json To Server Type Artifacts Folder" - - cp "./Tests/conf.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" - - section_end "Copy conf.json To Server Type Artifacts Folder" - - - section_start "Find dependencies changes" --collapsed - - | - if [[ -z $BUCKET_UPLOAD || $TEST_UPLOAD == "false" ]]; then - source ./Tests/scripts/get_previous_master_sha.sh - if [[ -z $PREVIOUS_MASTER_SHA ]]; then - echo "WARNING: failed to detect previous master SHA, skipping find dependencies changes" - else - echo "Finding pack dependencies diff against $PREVIOUS_MASTER_SHA" - python Tests/scripts/find_pack_dependencies_changes.py --gitlab-token $GITLAB_API_TOKEN --master-sha $PREVIOUS_MASTER_SHA --job-name $CI_JOB_NAME --artifacts-folder "$ARTIFACTS_FOLDER_SERVER_TYPE" - fi - else - echo "Test upload flow - skipping find dependencies changes" - fi - - section_end "Find dependencies changes" - - - section_start "Replace Cortex XSOAR" --collapsed - - | - if [[ $MARKETPLACE_VERSION == "marketplacev2" || 
$MARKETPLACE_VERSION == "xpanse" ]]; - then - echo "Replace Cortex XSOAR for non-xsoar build." - pushd "${ARTIFACTS_FOLDER_SERVER_TYPE}" - find content_packs -type f -not \( -path "*/ReleaseNotes/*" \) -exec sed -i -e 's/Cortex XSOAR/'"$PRODUCT_NAME"'/gI' {} \; - pushd content_packs; zip -r ../content_packs.zip * 1> /dev/null; popd - rm -rf content_packs - popd - fi - - section_end "Replace Cortex XSOAR" - - section_start "Collect Tests" --collapsed - - | - if [ -n "${INSTANCE_TESTS}" ]; then - echo "Skipping - not running in INSTANCE_TESTS build" - else - [ -n "${NIGHTLY}" ] && IS_NIGHTLY=true || IS_NIGHTLY=false - [ -n "${DEMISTO_SDK_NIGHTLY}" ] && DEMISTO_SDK_NIGHTLY=true || DEMISTO_SDK_NIGHTLY=false - python3 ./Tests/scripts/collect_tests/collect_tests.py -n $IS_NIGHTLY --sdk-nightly $DEMISTO_SDK_NIGHTLY --marketplace "$MARKETPLACE_VERSION" --service_account $GCS_MARKET_KEY --graph true --override_all_packs $OVERRIDE_ALL_PACKS -up "${PACKS_TO_UPLOAD}" - fi - - section_end "Collect Tests" - - - section_start "Prepare Content Packs for Testing" - - ./Tests/scripts/prepare_content_packs_for_testing.sh "$MARKETPLACE_BUCKET" "$STORAGE_BASE_PATH" "$MARKETPLACE_VERSION" - - section_end "Prepare Content Packs for Testing" - - - section_start "Override and upload core packs versions" - - ./Tests/Marketplace/upload_versions_core_files.sh "$MARKETPLACE_BUCKET" "$STORAGE_BASE_PATH" "$MARKETPLACE_VERSION" "$LAST_UPLOAD_COMMIT" - - section_end "Override and upload core packs versions" - - - section_start "Create Instances for XSOAR" - - | - if [[ ${MARKETPLACE_VERSION} = "xsoar" ]]; then - echo "Creating Instances, only for XSOAR." - [ -n "${TIME_TO_LIVE}" ] && TTL=${TIME_TO_LIVE} || TTL=300 - if [[ "${DEMISTO_SDK_NIGHTLY}" == "true" ]]; then - echo "Creating Xsoar Instances for SDK nightly- changing the filter_envs json." - jq '.' 
<<< '{"Server 6.9": false, "Server 6.10": false, "Server 6.11": false, "Server 6.12": false, "Server Master": true}' > "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_envs.json" - fi - python3 ./gcp/create_instance.py --env-type "${IFRA_ENV_TYPE}" --outfile "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" --filter-envs "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_envs.json" --creds "${CONTENT_BUILD_GCP}" --zone "${GCP_ZONE}" - fi - - section_end "Create Instances for XSOAR" - - section_start "Upload Artifacts to GCP" --collapsed - - ./Tests/scripts/upload_artifacts.sh - - section_end "Upload Artifacts to GCP" - - echo "create instances done" > "${ARTIFACTS_FOLDER_SERVER_TYPE}/create_instances_done.txt" - - job-done - -xsoar-prepare-testing-bucket: - variables: - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "xsoar" - MARKETPLACE_BUCKET: "$GCS_MARKET_BUCKET" - extends: - - .generic-prepare-testing-bucket - -xsoar-saas-prepare-testing-bucket: - variables: - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR SAAS" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "xsoar_saas" - MARKETPLACE_BUCKET: "$GCS_MARKET_XSOAR_SAAS_BUCKET" - extends: - - .generic-prepare-testing-bucket - -mpv2-prepare-testing-bucket: - variables: - PRODUCT_TYPE: "XSIAM" - SERVER_TYPE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "marketplacev2" - MARKETPLACE_BUCKET: "$GCS_MARKET_V2_BUCKET" - PRODUCT_NAME: "Cortex XSIAM" - extends: - - .generic-prepare-testing-bucket - -xpanse-prepare-testing-bucket: - variables: - PRODUCT_TYPE: "XPANSE" - SERVER_TYPE: "XPANSE" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XPANSE}" - 
ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XPANSE}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "xpanse" - PRODUCT_NAME: "Cortex XPANSE" - MARKETPLACE_BUCKET: "$GCS_MARKET_XPANSE_BUCKET" - extends: - - .generic-prepare-testing-bucket - -.run_tests: - - section_start "Run Tests" - - | - if [[ -n "${NIGHTLY}" && $SERVER_TYPE == "XSOAR SAAS" ]]; then - ./Tests/scripts/run_tests.sh --generate-empty-result-file - cat "${CLOUD_API_KEYS}" > "cloud_api_keys.json" - ./Tests/scripts/run_e2e_tests.sh || EXIT_CODE=$? - else - ./Tests/scripts/run_e2e_tests.sh --generate-empty-result-file - if [[ -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" ]]; then - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" Tests/conf.json - fi - ./Tests/scripts/run_tests.sh || EXIT_CODE=$? - rm -f Tests/conf.json - fi - - section_end "Run Tests" - -.test_content_on_xsoar_server_instances_base: - tags: - - gke - extends: - - .default-job-settings - - .push-rule - variables: - SERVER_TYPE: "XSOAR" - PRODUCT_TYPE: "XSOAR" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}/" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_XSOAR}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - stage: run-instances - needs: - - job: xsoar-prepare-testing-bucket - script: - - EXIT_CODE=0 - - section_start "Check if should run Instance role" - - export INSTANCES_CREATED_FOR_ROLE=$(cat "${ENV_RESULTS_PATH}" | jq -c "map(select(.Role == \"${INSTANCE_ROLE}\")) | length") - - | - echo "Instance role:${INSTANCE_ROLE} Product type:${PRODUCT_TYPE} Instances created for role:${INSTANCES_CREATED_FOR_ROLE}" - if [[ "${INSTANCES_CREATED_FOR_ROLE}" -eq 0 ]]; then - echo "Instances with role ${INSTANCE_ROLE} were not created, not running the instance flow." 
- rm -f "${ARTIFACTS_FOLDER_INSTANCE}/instance_role.txt" # delete the instance_role.txt file so the job will not be collected by slack notifier. - job-done - exit 0 - fi - - section_end "Check if should run Instance role" - - !reference [.download-demisto-conf] - - !reference [.secrets-fetch] - - !reference [.ssh-config-setup] - - section_start "Wait Until Server Ready" - - | - [ -n "${NIGHTLY}" ] && IS_NIGHTLY=true || IS_NIGHTLY=false - python3 ./Tests/scripts/wait_until_server_ready.py -n ${IS_NIGHTLY} --instance-role "${INSTANCE_ROLE}" - - section_end "Wait Until Server Ready" - - section_start "Copy env results to artifacts folder" --collapsed - - | - # workaround for the hard-coded value in the sdk - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" "./artifacts/env_results.json" - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt" "./artifacts/filter_file.txt" - - section_end "Copy env results to artifacts folder" - - section_start "Install Packs and run Test-Module" - - ./Tests/scripts/install_content_and_test_integrations.sh || EXIT_CODE=$? - - cp -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" Tests/conf.json - - section_end "Install Packs and run Test-Module" - - section_start "Wait Until Server Ready" - - echo Going to sleep for 15 minutes to allow server finish indexing - - sleep-with-progress 900 30 "Sleeping... " 150 - - echo "Done sleeping!" 
- - section_end "Wait Until Server Ready" - - !reference [.run_tests] - - section_start "Get instance ssh-command" - - echo "INSTANCE_ROLE -> ${INSTANCE_ROLE}" - - INSTANCE_NAME=$(jq -r --arg role "$INSTANCE_ROLE" '.[] | select(.Role == $role) | .InstanceName' ./artifacts/env_results.json) - - echo -e "\e[1m gcloud compute ssh --zone \"us-central1-a\" \"${INSTANCE_NAME}\" --tunnel-through-iap --project "xsoar-content-build" \e[0m" - - section_end "Get instance ssh-command" - - section_start "Cleanup env results from artifacts folder" --collapsed - - | - # workaround for the hard-coded value in the sdk - rm -f "./artifacts/env_results.json" - rm -f "./artifacts/filter_file.txt" - - section_end "Cleanup env results from artifacts folder" - - job-done - - exit $EXIT_CODE - after_script: - - !reference [.default-after-script] - - !reference [.install_ssh_keys] - - !reference [.ssh-config-setup] - - !reference [.destroy_xsoar_instances] - artifacts: - when: always - expire_in: 30 days - reports: - junit: - - "${ARTIFACTS_FOLDER_INSTANCE}/test_playbooks_report.xml" - paths: - - "${ARTIFACTS_FOLDER_INSTANCE}/test_playbooks_report.xml" - - ${CI_PROJECT_DIR}/artifacts/* # restoring the default artifacts path from the job default settings - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* # restoring the default artifacts path from the job default settings - -tests_xsoar_server: - extends: - - .test_content_on_xsoar_server_instances_base - # No need to trigger in case of release branch or docker update branches (non-nightly packs) - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - if: '$NIGHTLY' - parallel: - matrix: - - INSTANCE_ROLE: - - "Server 6.9" - - "Server 6.10" - - "Server 6.11" - - "Server 6.12" - - "Server Master" - -fan-in-nightly: - tags: - - gke - stage: fan-in - rules: - - if: '$NIGHTLY' - when: always - script: - - echo "fan in nightly" - -jobs-done-check-nightly: - extends: - - 
.jobs-done-check - needs: - - cloning-content-repo-last-upload-commit - - run-pre-commit - - run-validations - - trigger-private-build - - mpv2-prepare-testing-bucket - - xpanse-prepare-testing-bucket - - xsoar-prepare-testing-bucket - - xsoar-saas-prepare-testing-bucket - - xsiam_server_ga - # - xsoar_ng_server_ga - - tests_xsoar_server - - xsoar-test_playbooks_results - - xsiam-test_playbooks_results - - xsiam-test_modeling_rule_results - # - xsoar-saas_test_e2e_results - tags: - - gke - rules: - - if: '$NIGHTLY' - when: always - variables: - WORKFLOW: 'Content Nightly' - -fan-in-on-push: - when: always - stage: fan-in - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH != $master_branch_name' - tags: - - gke - script: - - echo "fan in on push" - variables: - master_branch_name: master - -jobs-done-check-on-push: - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH != $master_branch_name' - extends: - - .push-rule - - .jobs-done-check - needs: - - run-pre-commit - - run-validations - - stop-running-pipelines - - test-upload-flow - - trigger-private-build - - validate-content-conf - - mpv2-prepare-testing-bucket - - xpanse-prepare-testing-bucket - - xsoar-prepare-testing-bucket - - xsoar-saas-prepare-testing-bucket - - xsiam_server_ga - - tests_xsoar_server - - xsoar_ng_server_ga - - xsoar-test_playbooks_results - - xsiam-test_playbooks_results - - xsiam-test_modeling_rule_results - tags: - - gke - variables: - WORKFLOW: 'Content PR' - master_branch_name: master - -fan-in-on-merge: - when: always - stage: fan-in - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH == $master_branch_name' - tags: - - gke - script: - - echo "fan in on merge" - variables: - master_branch_name: master - 
-jobs-done-check-on-merge: - rules: - - !reference [ .filter-non-nightly-docker-updates-rule, rules ] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH == $master_branch_name' - extends: - - .push-rule - - .jobs-done-check - needs: - - merge-dev-secrets - - run-pre-commit - - run-validations - - test-upload-flow - - trigger-private-build - - validate-content-conf - - mpv2-prepare-testing-bucket - - xpanse-prepare-testing-bucket - - xsoar-prepare-testing-bucket - - xsoar-saas-prepare-testing-bucket - - xsiam_server_ga - - tests_xsoar_server - - xsoar_ng_server_ga - - xsoar-test_playbooks_results - - xsiam-test_playbooks_results - - xsiam-test_modeling_rule_results - tags: - - gke - variables: - WORKFLOW: 'Content Merge' - master_branch_name: master - - -slack-notify-nightly-build: - extends: - - .trigger-slack-notification - rules: - - if: '$NIGHTLY' - when: always - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. 
- PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Content Nightly' - JOB_NAME: 'fan-in-nightly' - SLACK_CHANNEL: $SLACK_CHANNEL - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'false' - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME - -slack-notify-on-push: - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH != $master_branch_name' - when: always - extends: - - .trigger-slack-notification - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. - PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Content PR' - master_branch_name: master - JOB_NAME: 'fan-in-on-push' - SLACK_CHANNEL: "#dmst-build-private-" - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'true' - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME - -slack-notify-on-merge: - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/ && $CI_COMMIT_BRANCH == $master_branch_name' - when: always - extends: - - .trigger-slack-notification - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. 
- PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Content Merge' - master_branch_name: master - JOB_NAME: 'fan-in-on-merge' - SLACK_CHANNEL: "#dmst-build-test" - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'false' - -.test_content_on_cloud_server_instances_base: - tags: - - gke - - us-west1 - extends: - - .default-job-settings - - .push-rule - variables: - EXTRACT_PRIVATE_TESTDATA: "true" - stage: run-instances - script: - - EXIT_CODE=0 - - !reference [.download-demisto-conf] - - !reference [.secrets-fetch] - - section_start "Are there tests to run?" --collapsed - - | - if ! [[ -s "${ARTIFACTS_FOLDER_SERVER_TYPE}/content_packs_to_install.txt" || -s "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt" ]]; then - # The files are empty. - echo "Not running the instance flow, no tests to run were found." - ./Tests/scripts/run_tests.sh --generate-empty-result-file - ./Tests/scripts/test_modeling_rules.sh --generate-empty-result-file - ./Tests/scripts/run_e2e_tests.sh --generate-empty-result-file - job-done - exit $EXIT_CODE - fi - # workaround for the hard-coded value in the sdk - - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt" "./artifacts/filter_file.txt" - - section_end "Are there tests to run?" - - - !reference [.lock-machine] - - !reference [.uninstall-packs-and-reset-bucket-cloud] - - - section_start "Install Packs and run Test-Module" - - ./Tests/scripts/install_content_and_test_integrations.sh || EXIT_CODE=$? - - section_end "Install Packs and run Test-Module" - - !reference [.run_tests] - - section_start "Test Modeling Rules" - - ./Tests/scripts/test_modeling_rules.sh || EXIT_CODE=$? - - section_end "Test Modeling Rules" - - - section_start "Packs re-installation test" - - | - if [[ -z "${NIGHTLY}" && -s "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_reinstall_to_test.txt" ]]; then - echo "Running the packs re-installation test." - ./Tests/scripts/reinstall_packs_on_cloud_instances.sh || EXIT_CODE=$? 
- fi - - section_end "Packs re-installation test" - - - !reference [.cloud-machine-information] - - section_start "Cleanup env results from artifacts folder" --collapsed - - | - # workaround for the hard-coded value in the sdk - rm -f "./artifacts/filter_file.txt" - - section_end "Cleanup env results from artifacts folder" - - - job-done - - exit $EXIT_CODE - after_script: - - source .gitlab/helper_functions.sh - - !reference [.unlock-machine] - - -xsiam_server_ga: - extends: - - .test_content_on_cloud_server_instances_base - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - if: '$NIGHTLY' - when: always - variables: - CLOUD_MACHINES_TYPE: "nightly" - CLOUD_MACHINES_COUNT: 1 - GCS_LOCKS_PATH: "content-locks/locks-xsiam-ga-nightly" - timeout: 12 hours - variables: - CLOUD_MACHINES_TYPE: "build" - CLOUD_MACHINES_COUNT: 1 - INSTANCE_ROLE: "XSIAM" - SERVER_TYPE: "XSIAM" - PRODUCT_TYPE: "XSIAM" - GCS_QUEUE_FILE: "queue-ga" - GCS_LOCKS_PATH: "content-locks/locks-xsiam-ga" - CLOUD_SERVERS_FILE: "xsiam_servers_path" - CLOUD_API_KEYS: $XSIAM_API_KEYS - CLOUD_API_TOKENS: $XSIAM_TOKENS - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_MPV2}/instance_xsiam" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - GCS_MARKET_BUCKET: "${GCS_MARKET_V2_BUCKET}" - GCS_SOURCE_BUCKET: "$GCS_PRODUCTION_V2_BUCKET" - GCS_MACHINES_BUCKET: "marketplace-v2-dist-dev/upload-flow/builds-xsiam" - MARKETPLACE_NAME: "marketplacev2" - NON_REMOVABLE_PACKS: "Base" - needs: - - job: mpv2-prepare-testing-bucket - optional: true - artifacts: - when: always - expire_in: 30 days - reports: - junit: - - "${ARTIFACTS_FOLDER_INSTANCE}/test_modeling_rules_report.xml" - - "${ARTIFACTS_FOLDER_INSTANCE}/test_playbooks_report.xml" - paths: - - 
"${ARTIFACTS_FOLDER_INSTANCE}/test_modeling_rules_report.xml" - - "${ARTIFACTS_FOLDER_INSTANCE}/test_playbooks_report.xml" - - ${CI_PROJECT_DIR}/artifacts/* # restoring the default artifacts path from the job default settings - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* # restoring the default artifacts path from the job default settings - -xsoar_ng_server_ga: - extends: - - .test_content_on_cloud_server_instances_base - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - if: '$NIGHTLY' - when: never - variables: - CLOUD_MACHINES_TYPE: "build" - CLOUD_MACHINES_COUNT: 1 - INSTANCE_ROLE: "XSOAR SAAS" - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR SAAS" - GCS_QUEUE_FILE: "queue-ga" - GCS_LOCKS_PATH: "content-locks/locks-xsoar-ng" - CLOUD_SERVERS_FILE: "xsoar_ng_servers_path" - CLOUD_API_KEYS: $XSOAR_NG_API_KEYS - CLOUD_API_TOKENS: $XSIAM_TOKENS - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_XSOAR}/instance_saas" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - GCS_SOURCE_BUCKET: "${GCS_PRODUCTION_XSOAR_SAAS_BUCKET}" - GCS_MACHINES_BUCKET: "marketplace-saas-dist-dev/upload-flow/builds-xsoar-ng" - MARKETPLACE_NAME: "xsoar_saas" - NON_REMOVABLE_PACKS: "Base" - needs: - - job: xsoar-saas-prepare-testing-bucket - artifacts: - when: always - expire_in: 30 days - reports: - junit: - - "${ARTIFACTS_FOLDER_INSTANCE}/test_playbooks_report.xml" - - "${ARTIFACTS_FOLDER_INSTANCE}/e2e_tests_result.xml" - paths: - - "${ARTIFACTS_FOLDER_INSTANCE}/test_playbooks_report.xml" - - "${ARTIFACTS_FOLDER_INSTANCE}/e2e_tests_result.xml" - - ${CI_PROJECT_DIR}/artifacts/* # restoring the default artifacts path from the job default settings - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* # restoring the default artifacts path from the job default settings - 
-test-upload-flow: - tags: - - gke - extends: - - .default-job-settings - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - variables: - ALL_BUCKETS: "$GCS_MARKET_BUCKET_DEV,$GCS_MARKET_V2_BUCKET_DEV,$GCS_MARKET_XSOAR_SAAS_BUCKET_DEV" - stage: unittests-and-validations - script: - - section_start "Checks Whether to Trigger a Test Upload" - - SHOULD_SKIP_TEST_UPLOAD=$(./Utils/should_trigger_test_upload.sh) - - if [ -z "$SHOULD_SKIP_TEST_UPLOAD" ]; then - - echo "No upload-flow related files were modified, skipping upload test" - - job-done - - exit 0 - - fi - - echo "Found modified files that should be tested in upload-flow" - - section_end "Checks Whether to Trigger a Test Upload" - - - section_start "Create Testing Branch" - - export BRANCH="${CI_COMMIT_BRANCH}-upload_test_branch-${CI_PIPELINE_ID}" - - echo "${BRANCH}" > "${ARTIFACTS_FOLDER}/test_upload_flow_branch.txt" - - python3 ./Utils/test_upload_flow/create_test_branch.py -tb "${BRANCH}" -a "${ARTIFACTS_FOLDER}" -g "${GITLAB_PUSH_TOKEN}" - - - echo "Created test branch:${BRANCH}" - - section_end "Create Testing Branch" - - - section_start "Trigger Test Upload Flow On Testing Branch" - # retry mechanism for trigger upload pipeline in case it failed because of gitlab connectivity issues. 
- - for _ in {1..3}; do - - export pipeline_id=$(./Utils/trigger_test_upload_flow.sh -ct "${GITLAB_SVC_USER_TOKEN}" -b "${BRANCH}" -dz | jq .id) - - if [ "${pipeline_id}" != "null" ]; then - - break - - fi - - echo "Sleeping for 10 seconds before retrying" - - sleep 10 - - done - - echo "Successful triggered test upload - ${CI_SERVER_URL}/${CI_PROJECT_NAMESPACE}/content/-/pipelines/$pipeline_id" - - section_end "Trigger Test Upload Flow On Testing Branch" - - - section_start "Wait For Upload To Finish" - - python3 ./Utils/test_upload_flow/wait_for_upload.py -p $pipeline_id -g $GITLAB_API_TOKEN - - section_end "Wait For Upload To Finish" - - - section_start "Verify Created Testing Bucket" - - current_storage_base_path="upload-flow/builds/$BRANCH/$pipeline_id/content/packs" - - python3 ./Utils/test_upload_flow/verify_bucket.py -a "${ARTIFACTS_FOLDER}" -s $GCS_MARKET_KEY -sb $current_storage_base_path -b $ALL_BUCKETS - - section_end "Verify Created Testing Bucket" - - job-done - after_script: - - !reference [.default-after-script] - - section_start "Delete Testing Branch" - - | - if [ -f "${ARTIFACTS_FOLDER}/test_upload_flow_branch.txt" ]; then - BRANCH=$(cat "${ARTIFACTS_FOLDER}/test_upload_flow_branch.txt") - python3 ./Utils/test_upload_flow/delete_test_branch.py -tb "${BRANCH}" -g "${GITLAB_PUSH_TOKEN}" - fi - - section_end "Delete Testing Branch" - -.server_test_playbooks_results: - stage: results - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - if: '$NIGHTLY' - when: always - extends: - - .default-job-settings - script: - - ./Tests/scripts/test_playbooks_results.sh - - job-done - artifacts: - when: always - expire_in: 30 days - reports: - junit: - - "${ARTIFACTS_FOLDER}/test_playbooks_report.xml" - paths: - - "${ARTIFACTS_FOLDER}/test_playbooks_report.xml" - - ${CI_PROJECT_DIR}/artifacts/* # restoring the default artifacts path from the job default settings - - 
${CI_PROJECT_DIR}/pipeline_jobs_folder/* # restoring the default artifacts path from the job default settings - - -.e2e_test_results: - stage: results - rules: - - !reference [ .filter-non-nightly-docker-updates-rule, rules ] - - if: '$NIGHTLY' - when: never - extends: - - .default-job-settings - needs: - - job: xsoar_ng_server_ga - optional: true - script: - - ./Tests/scripts/test_e2e_results.sh - - job-done - artifacts: - when: always - expire_in: 30 days - reports: - junit: - - "${ARTIFACTS_FOLDER}/e2e_tests_result.xml" - paths: - - "${ARTIFACTS_FOLDER}/e2e_tests_result.xml" - - ${CI_PROJECT_DIR}/artifacts/* # restoring the default artifacts path from the job default settings - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* # restoring the default artifacts path from the job default settings - - -.test_modeling_rule_results: - stage: results - rules: - - !reference [.filter-non-nightly-docker-updates-rule, rules] - - if: '$CI_PIPELINE_SOURCE =~ /^(push|contrib)$/' - - if: '$NIGHTLY' - when: always - extends: - - .default-job-settings - script: - - ./Tests/scripts/test_modeling_rule_results.sh - - job-done - -xsoar-test_playbooks_results: - variables: - SERVER_TYPE: "XSOAR" - PRODUCT_TYPE: "XSOAR" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - JIRA_ISSUE_TYPE: "XSOAR Dev Bug" - JIRA_COMPONENT: "Test Failure" - JIRA_LABELS: '["XSOAR", "nightly"]' - extends: .server_test_playbooks_results - needs: - - job: tests_xsoar_server - - job: xsoar_ng_server_ga - optional: true - dependencies: - - tests_xsoar_server - - xsoar_ng_server_ga - -xsiam-test_playbooks_results: - variables: - SERVER_TYPE: "XSIAM" - PRODUCT_TYPE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - JIRA_ISSUE_TYPE: "XSOAR Dev Bug" - JIRA_COMPONENT: "Test Failure" - JIRA_LABELS: '["XSIAM", "nightly"]' - extends: .server_test_playbooks_results - needs: - - xsiam_server_ga - dependencies: - - xsiam_server_ga - -xsoar-saas_test_e2e_results: - variables: - SERVER_TYPE: "XSOAR" - PRODUCT_TYPE: "XSOAR 
SAAS" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - extends: .e2e_test_results - needs: - - xsoar_ng_server_ga - dependencies: - - xsoar_ng_server_ga - -xsiam-test_modeling_rule_results: - variables: - SERVER_TYPE: "XSIAM" - PRODUCT_TYPE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - JIRA_COMPONENT: "Test Modeling Failure" - JIRA_ISSUE_TYPE: "XSOAR Dev Bug" - JIRA_LABELS: '["XSIAM", "nightly"]' - extends: .test_modeling_rule_results - needs: - - xsiam_server_ga - dependencies: - - xsiam_server_ga diff --git a/.gitlab/ci/.gitlab-ci.sdk-nightly.yml b/.gitlab/ci/.gitlab-ci.sdk-nightly.yml deleted file mode 100644 index 19ea630e407d..000000000000 --- a/.gitlab/ci/.gitlab-ci.sdk-nightly.yml +++ /dev/null @@ -1,537 +0,0 @@ -.sdk-nightly-schedule-rule: - rules: - - if: '$CI_PIPELINE_SOURCE =~ /^(schedule|trigger)$/ && $DEMISTO_SDK_NIGHTLY == "true"' - -# used for jobs which we want to run in a pipeline even when previous jobs in the pipeline fail e.g. Slack notification -.sdk-nightly-schedule-rule-always: - rules: - - if: '$CI_PIPELINE_SOURCE =~ /^(schedule|trigger)$/ && $DEMISTO_SDK_NIGHTLY == "true"' - when: always - -.change-file-ids: &change-file-ids - - python3 ./Tests/scripts/sdk_nightly_change_json_file_fields.py Packs/HelloWorld/Classifiers/classifier-mapper-incoming-HelloWorldTest.json name - -.upload-entities-to-cortex-xsoar: &upload-entities-to-cortex-xsoar - - section_start "Upload Entities to Cortex XSOAR" --collapsed - - demisto-sdk upload -i Packs/HelloWorld/Integrations/ --insecure - - demisto-sdk upload -i Packs/HelloWorld/TestPlaybooks/playbook-HelloWorld-Test.yml --insecure - - demisto-sdk upload -i Packs/HelloWorld/Layouts/layoutscontainer-Hello_World_Test_Layout.json --insecure - - demisto-sdk upload -i Packs/HelloWorld/IncidentFields/incidentfield-Hello_World_IncidentField_Test.json --insecure - - demisto-sdk upload -i Packs/HelloWorld/IncidentTypes/incidenttype-Hello_World_Alert_Test.json --insecure - - demisto-sdk upload -i 
Packs/HelloWorld/Classifiers/classifier-mapper-incoming-HelloWorldTest.json --insecure - - section_end "Upload Entities to Cortex XSOAR" - -.run_test_content: &run_test_content - - section_start "Run test-content" --collapsed - - | - if [[ -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" ]]; then - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" Tests/conf.json - fi - - | - ./Tests/scripts/run_tests.sh || EXIT_CODE=$? - - rm -f Tests/conf.json - - section_end "Run test-content" - -.run_end_to_end_tests: &run_end_to_end_tests - - section_start "End to End Tests" --collapsed - - | - mkdir $ARTIFACTS_FOLDER/demisto-sdk - git clone -b ${SDK_REF} --single-branch --depth 1 https://github.com/demisto/demisto-sdk.git $ARTIFACTS_FOLDER/demisto-sdk - python3 -m pytest "$ARTIFACTS_FOLDER/demisto-sdk/tests_end_to_end/${E2E_TEST_FOLDER}" || EXIT_CODE=$? - - section_end "End to End Tests" - -.copy_env_results: ©_env_results - - section_start "Copy env results to artifacts folder" --collapsed - - | - # workaround for the hard-coded value in the sdk CIAC-9091 - if [[ -e "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" ]]; then - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" "./artifacts/env_results.json" - fi - if [[ -e "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt" ]]; then - cp "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt" "./artifacts/filter_file.txt" - fi - - section_end "Copy env results to artifacts folder" - -.prepare_for_test_content: &prepare_for_test_content - - section_start "Install Packs" --collapsed - - cat "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt" - - ./Tests/scripts/install_content_and_test_integrations.sh || EXIT_CODE=$? 
- - cp -f "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" Tests/conf.json - - section_end "Install Packs" - -.assert_test_content: &assert_test_content - - section_start "Assert test-content" --collapsed - - num_test_that_ran=$(grep -c "Test" "${ARTIFACTS_FOLDER_INSTANCE}/succeeded_tests.txt") - - num_tests_that_should_run=$(grep -c "Test" "${ARTIFACTS_FOLDER_SERVER_TYPE}/filter_file.txt") - - echo "Number of test-playbook tested - $num_test_that_ran " - - echo "Number of tests that should have been tested - $num_tests_that_should_run " - - | - if [ $num_test_that_ran -eq $num_tests_that_should_run ]; then - echo "test-content ran all of the collected test-playbooks." - else - echo "test-content did not ran all of the test-playbooks that were collected. exit with exit code 1" - EXIT_CODE=1 - fi - - section_end "Assert test-content" - -.cleanup_after_test_content: &cleanup_after_test_content - - section_start "Cleanup env results from artifacts folder" --collapsed - - | - # workaround for the hard-coded value in the sdk CIAC-9091 - if [[ -e "./artifacts/env_results.json" ]]; then - rm -f "./artifacts/env_results.json" - fi - if [[ -e "./artifacts/filter_file.txt" ]]; then - rm -f "./artifacts/filter_file.txt" - fi - - section_end "Cleanup env results from artifacts folder" - -demisto-sdk-nightly:run-pre-commit: - extends: - - .run-pre-commit - - .sdk-nightly-schedule-rule - -demisto-sdk-nightly:run-validations: - extends: - - .run-validations - - .sdk-nightly-schedule-rule - -demisto-sdk-nightly:run-validations-new-validate-flow: - extends: - - .run-validations-new-validate-flow - - .sdk-nightly-schedule-rule - -demisto_sdk_nightly:check_idset_dependent_commands: - tags: - - gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule - stage: unittests-and-validations - needs: [] - inherit: - variables: true - variables: - IS_NIGHTLY: "false" - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - INSTANCE_ROLE: "Server Master" - ARTIFACTS_FOLDER: 
"${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - script: - - section_start "Secrets Detection" --collapsed - - demisto-sdk secrets --post-commit --ignore-entropy - - section_end "Secrets Detection" - - section_start "Update Conf" --collapsed - - cp "./Tests/conf.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/conf.json" # workaround for the hard-coded value in the sdk - - section_end "Update Conf" - - !reference [.create-id-set] - - section_start "Activate GCloud Service Account" --collapsed - - gcloud auth activate-service-account --key-file="$GCS_MARKET_KEY" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - echo "successfully activated google cloud service account" - - section_end "Activate GCloud Service Account" - - section_start "Download private ID set" --collapsed - - gsutil cp "gs://marketplace-dist/content/private_id_set.json" "${ARTIFACTS_FOLDER_SERVER_TYPE}/unified_id_set.json" - - echo "successfully downloaded private ID set" - - section_end "Download private ID set" - - section_start "Revoking GCP Auth" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - section_end "Revoking GCP Auth" - - section_start "Merge public and private ID sets" --collapsed - - demisto-sdk merge-id-sets -i1 "${ARTIFACTS_FOLDER_SERVER_TYPE}/id_set.json" -i2 "${ARTIFACTS_FOLDER_SERVER_TYPE}/unified_id_set.json" -o "${ARTIFACTS_FOLDER_SERVER_TYPE}/unified_id_set.json" - - echo "successfully merged public and private ID sets" - - section_end "Merge public and private ID sets" - - section_start "Common Server Documentation" --collapsed - - ./Documentation/commonServerDocs.sh - - section_end "Common Server Documentation" - - section_start "Collect Test List and Content Packs" --collapsed - - python3 
./Tests/scripts/collect_tests/collect_tests.py -n "${IS_NIGHTLY}" --sdk-nightly "${DEMISTO_SDK_NIGHTLY}" - - section_end "Collect Test List and Content Packs" - - section_start "Calculate Packs Dependencies" --collapsed - - demisto-sdk find-dependencies -idp "${ARTIFACTS_FOLDER_SERVER_TYPE}/id_set.json" --output-path "${ARTIFACTS_FOLDER_SERVER_TYPE}/packs_dependencies.json" --all-packs-dependencies - - section_end "Calculate Packs Dependencies" - - section_start "Cleanup env results from artifacts folder" --collapsed - - job-done - -demisto-sdk-nightly:xsoar-prepare-testing-bucket: - tags: - - gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule - variables: - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - INSTANCE_ROLE: "Server Master" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_XSOAR}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - IFRA_ENV_TYPE: "Server Master" - MARKETPLACE_VERSION: "xsoar" - MARKETPLACE_BUCKET: "$GCS_MARKET_BUCKET" - cache: - policy: pull-push - needs: [] - stage: prepare-testing-bucket - script: - - !reference [.generic-prepare-testing-bucket, script] - - job-done - -demisto-sdk-nightly:mpv2-prepare-testing-bucket: - tags: - - gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule - variables: - PRODUCT_TYPE: "XSIAM" - SERVER_TYPE: "XSIAM" - INSTANCE_ROLE: "XSIAM" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_MPV2}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "marketplacev2" - MARKETPLACE_BUCKET: "$GCS_MARKET_V2_BUCKET" - PRODUCT_NAME: "Cortex XSIAM" - IFRA_ENV_TYPE: "Server Master" - cache: - policy: pull-push - needs: [] - stage: prepare-testing-bucket - script: - - !reference [.generic-prepare-testing-bucket, script] - - job-done - 
-demisto-sdk-nightly:xpanse-prepare-testing-bucket: - tags: - - gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule - variables: - PRODUCT_TYPE: "XPANSE" - SERVER_TYPE: "XPANSE" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XPANSE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XPANSE}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "xpanse" - PRODUCT_NAME: "Cortex XPANSE" - MARKETPLACE_BUCKET: "$GCS_MARKET_XPANSE_BUCKET" - IFRA_ENV_TYPE: "Server Master" - cache: - policy: pull-push - needs: [] - stage: prepare-testing-bucket - script: - - !reference [.generic-prepare-testing-bucket, script] - - job-done - -demisto-sdk-nightly:xsoar-saas-prepare-testing-bucket: - tags: - - gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule - variables: - IFRA_ENV_TYPE: "Server Master" - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR SAAS" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - MARKETPLACE_VERSION: "xsoar_saas" - MARKETPLACE_BUCKET: "$GCS_MARKET_XSOAR_SAAS_BUCKET" - cache: - policy: pull-push - needs: [] - stage: prepare-testing-bucket - script: - - !reference [.generic-prepare-testing-bucket, script] - - job-done - -demisto-sdk-nightly:test-upload-flow: - tags: - - gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule - variables: - ALL_BUCKETS: "$GCS_MARKET_BUCKET_DEV,$GCS_MARKET_V2_BUCKET_DEV,$GCS_MARKET_XSOAR_SAAS_BUCKET_DEV" - needs: [] - stage: unittests-and-validations - script: - - !reference [test-upload-flow, script] - -demisto-sdk-nightly:run-end-to-end-tests-general: - tags: - - gce # can't run docker in docker on gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule-always - services: # needed for running lint - - name: ${DOCKER_IO}/library/docker:20.10.12-dind - alias: docker - variables: - DOCKER_HOST: tcp://docker:2375 - DOCKER_DRIVER: overlay2 - DOCKER_TLS_CERTDIR: "" - PRODUCT_TYPE: "XSOAR" - 
SERVER_TYPE: "XSOAR" - INSTANCE_ROLE: "Server Master" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - E2E_TEST_FOLDER: "general" - needs: ["demisto-sdk-nightly:xsoar-prepare-testing-bucket"] - stage: run-instances - script: - - EXIT_CODE=0 - - !reference [.ssh-config-setup] - - - section_start "End to End Tests" --collapsed - - | - mkdir $ARTIFACTS_FOLDER/demisto-sdk - git clone -b ${SDK_REF} --single-branch --depth 1 https://github.com/demisto/demisto-sdk.git $ARTIFACTS_FOLDER/demisto-sdk - - python3 -m pytest ${ARTIFACTS_FOLDER}/demisto-sdk/tests_end_to_end/${E2E_TEST_FOLDER} || EXIT_CODE=$? - - section_end "End to End Tests" - - - job-done - - exit $EXIT_CODE - after_script: - - !reference [.default-after-script] - -demisto-sdk-nightly:run-end-to-end-tests-xsoar: - tags: - - gce # can't run docker in docker on gke - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule-always - services: # needed for running e2e tests for docker - - name: ${DOCKER_IO}/library/docker:20.10.12-dind - alias: docker - variables: - DOCKER_HOST: tcp://docker:2375 - DOCKER_DRIVER: overlay2 - DOCKER_TLS_CERTDIR: "" - PRODUCT_TYPE: "XSOAR" - SERVER_TYPE: "XSOAR" - INSTANCE_ROLE: "Server Master" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER}/instance_${INSTANCE_ROLE}" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - FAIL_ON_ERROR: "true" - E2E_TEST_FOLDER: "xsoar" - needs: ["demisto-sdk-nightly:xsoar-prepare-testing-bucket"] - stage: run-instances - script: - - EXIT_CODE=0 - - !reference [.ssh-config-setup] - - - !reference [.copy_env_results] - - - section_start "Wait Until Server Ready" 
--collapsed - - | - [ -n "${NIGHTLY}" ] && IS_NIGHTLY=true || IS_NIGHTLY=false - python3 ./Tests/scripts/wait_until_server_ready.py -n ${IS_NIGHTLY} --instance-role "${INSTANCE_ROLE}" || EXIT_CODE=$? - - section_end "Wait Until Server Ready" - - - !reference [.download-demisto-conf] - - !reference [.secrets-fetch] - - - section_start "End to End Tests" --collapsed - - | - mkdir $ARTIFACTS_FOLDER/demisto-sdk - git clone -b ${SDK_REF} --single-branch --depth 1 https://github.com/demisto/demisto-sdk.git $ARTIFACTS_FOLDER/demisto-sdk - - unset DEMISTO_API_KEY - export DEMISTO_BASE_URL="https://$(cat "${ENV_RESULTS_PATH}" | jq -r '.[0].InstanceDNS')" - echo "Server URL: $DEMISTO_BASE_URL" - python3 -m pytest ${ARTIFACTS_FOLDER}/demisto-sdk/tests_end_to_end/${E2E_TEST_FOLDER} || EXIT_CODE=$? - - section_end "End to End Tests" - - - !reference [.prepare_for_test_content] - - - section_start "Wait Until Server Ready" --collapsed - - echo Going to sleep for 15 minutes to allow server finish indexing - - sleep-with-progress 900 30 "Sleeping... " 150 - - echo "Done sleeping!" 
- - section_end "Wait Until Server Ready" - - - !reference [.run_test_content] - - - !reference [.assert_test_content] - - - !reference [.cleanup_after_test_content] - - - job-done - - exit $EXIT_CODE - after_script: - - !reference [.default-after-script] - - !reference [.install_ssh_keys] - - !reference [.ssh-config-setup] - - - !reference [.destroy_xsoar_instances] - -demisto-sdk-nightly:run-end-to-end-tests-xsiam: - rules: - - when: never - tags: - - gce - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule-always - services: # needed for running e2e tests for docker - - name: ${DOCKER_IO}/library/docker:20.10.12-dind - alias: docker - variables: - CLOUD_MACHINES_TYPE: "build" - CLOUD_MACHINES_COUNT: 1 - INSTANCE_ROLE: "xsiam" - GCS_QUEUE_FILE: "queue-ga" - GCS_LOCKS_PATH: "content-locks/locks-xsiam-ga" - CLOUD_SERVERS_FILE: "xsiam_servers_path" - XSIAM_SERVERS_PATH: "./xsiam_servers.json" - CLOUD_API_KEYS: $XSIAM_API_KEYS - CLOUD_API_TOKENS: $XSIAM_TOKENS - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_MPV2}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER_MPV2}/instance_${INSTANCE_ROLE}" - GCS_MARKET_BUCKET: "${GCS_MARKET_V2_BUCKET}" - GCS_SOURCE_BUCKET: "$GCS_PRODUCTION_V2_BUCKET" - GCS_MACHINES_BUCKET: "marketplace-v2-dist-dev/upload-flow/builds-xsiam" - SERVER_TYPE: "XSIAM" - MARKETPLACE_NAME: "marketplacev2" - NON_REMOVABLE_PACKS: "Base" - EXTRACT_PRIVATE_TESTDATA: "true" - FAIL_ON_ERROR: "true" - E2E_TEST_FOLDER: "xsiam" - PRODUCT_TYPE: "XSIAM" - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_MPV2}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - needs: ["demisto-sdk-nightly:mpv2-prepare-testing-bucket"] - stage: run-instances - script: - - EXIT_CODE=0 - - - !reference [.copy_env_results] - - - !reference [.download-demisto-conf] - - !reference [.secrets-fetch] - - !reference [.lock-machine] - - - !reference [.export_cloud_machine_constants] - - - !reference [.run_end_to_end_tests] - - - 
!reference [.prepare_for_test_content] - - - !reference [.run_test_content] - - - !reference [.assert_test_content] - - - !reference [.cleanup_after_test_content] - - - !reference [.cloud-machine-information] - - job-done - - exit $EXIT_CODE - - after_script: - - source .gitlab/helper_functions.sh - - !reference [.unlock-machine] - -demisto-sdk-nightly:run-end-to-end-tests-xsoar-saas: - rules: - - when: never - tags: - - gce - extends: - - .default-job-settings - - .sdk-nightly-schedule-rule-always - services: # needed for running e2e tests for docker - - name: ${DOCKER_IO}/library/docker:20.10.12-dind - alias: docker - variables: - CLOUD_MACHINES_TYPE: "build" - CLOUD_MACHINES_COUNT: 1 - INSTANCE_ROLE: "xsoar_saas" - SERVER_TYPE: "XSOAR SAAS" - GCS_QUEUE_FILE: "queue-ga" - GCS_LOCKS_PATH: "content-locks/locks-xsoar-ng" - CLOUD_SERVERS_FILE: "xsoar_ng_servers_path" - CLOUD_API_KEYS: $XSOAR_NG_API_KEYS - GCS_SOURCE_BUCKET: "${GCS_PRODUCTION_XSOAR_SAAS_BUCKET}" - ARTIFACTS_FOLDER: "${ARTIFACTS_FOLDER_XSOAR}" - ARTIFACTS_FOLDER_INSTANCE: "${ARTIFACTS_FOLDER}/instance_${INSTANCE_ROLE}" - GCS_MACHINES_BUCKET: "marketplace-saas-dist-dev/upload-flow/builds-xsoar-ng" - MARKETPLACE_NAME: "xsoar_saas" - NON_REMOVABLE_PACKS: "Base" - FAIL_ON_ERROR: "true" - E2E_TEST_FOLDER: "xsoar-saas" - PRODUCT_TYPE: "XSOAR" - CLOUD_API_TOKENS: $XSIAM_TOKENS - ARTIFACTS_FOLDER_SERVER_TYPE: "${ARTIFACTS_FOLDER_XSOAR}/server_type_${SERVER_TYPE}" - ENV_RESULTS_PATH: "${ARTIFACTS_FOLDER_SERVER_TYPE}/env_results.json" - needs: ["demisto-sdk-nightly:xsoar-saas-prepare-testing-bucket"] - stage: run-instances - script: - - EXIT_CODE=0 - - - !reference [.copy_env_results] - - - !reference [.download-demisto-conf] - - !reference [.secrets-fetch] - - !reference [.lock-machine] - - - !reference [.uninstall-packs-and-reset-bucket-cloud] - - !reference [.export_cloud_machine_constants] - - - !reference [.run_end_to_end_tests] - - - !reference [.prepare_for_test_content] - - - !reference 
[.run_test_content] - - - !reference [.assert_test_content] - - - !reference [.cleanup_after_test_content] - - - !reference [.cloud-machine-information] - - job-done - - exit $EXIT_CODE - - after_script: - - source .gitlab/helper_functions.sh - - !reference [.unlock-machine] - -demisto-sdk-nightly:fan-in: - tags: - - gke - stage: fan-in - extends: - - .sdk-nightly-schedule-rule-always - script: - - echo "fan in" - -demisto-sdk-nightly:trigger-slack-notify: - extends: - - .trigger-slack-notification - - .sdk-nightly-schedule-rule-always - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. - PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: "Demisto SDK Nightly" - JOB_NAME: "demisto-sdk-nightly:fan-in" - DEMISTO_SDK_NIGHTLY: $DEMISTO_SDK_NIGHTLY - OVERRIDE_SDK_REF: $OVERRIDE_SDK_REF - SDK_REF: $SDK_REF - SLACK_CHANNEL: $SLACK_CHANNEL - SLACK_JOB: "true" - SLACK_ALLOW_FAILURE: "false" - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME diff --git a/.gitlab/ci/.gitlab-ci.sdk-release.yml b/.gitlab/ci/.gitlab-ci.sdk-release.yml deleted file mode 100644 index f91eddc94446..000000000000 --- a/.gitlab/ci/.gitlab-ci.sdk-release.yml +++ /dev/null @@ -1,240 +0,0 @@ -.release-sdk: - rules: - - if: '$SDK_RELEASE == "true"' - -create-release-branch: - stage: create-release-branch - extends: - - .release-sdk - - .default-job-settings - variables: - SLACK_CHANNEL: $SLACK_CHANNEL - RELEASE_VERSION: $RELEASE_VERSION - GITHUB_TOKEN: $GITHUB_TOKEN - SDK_BRANCH_NAME: $SDK_BRANCH_NAME - CI_PIPELINE_URL: $CI_PIPELINE_URL - REVIEWER: $REVIEWER - script: - - section_start "Create release branch" --collapsed - - python3 
./Tests/sdk_release/pre_validations.py -t "${GITHUB_TOKEN}" -gt "${GITLAB_API_TOKEN}" -v "${RELEASE_VERSION}" -r "${REVIEWER}" -b "${SDK_BRANCH_NAME}" - - python3 ./Tests/scripts/gitlab_basic_slack_notifier.py -s "${SLACK_TOKEN}" -t "The release of demisto-sdk version \`"$RELEASE_VERSION"\` has started:\`"$CI_PIPELINE_URL"\` " --allow-failure "${SLACK_ALLOW_FAILURE}" -ch "${SLACK_CHANNEL}" - - python3 ./Tests/sdk_release/create_release_branch.py -t "${GITHUB_TOKEN}" -n "${RELEASE_VERSION}" -b "${SDK_BRANCH_NAME}" - - section_end "Create release branch" - -trigger-content-private-nightly: - stage: trigger-nightlies - extends: - - .release-sdk - - .default-job-settings - needs: - - create-release-branch - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - when: always - variables: - SLACK_CHANNEL: $SLACK_CHANNEL - RELEASE_VERSION: $RELEASE_VERSION - GITHUB_TOKEN: $GITHUB_TOKEN - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Trigger content-private nightly build" --collapsed - - python3 ./Utils/trigger_private_build.py --nightly --github-token "${GITHUB_TOKEN}" --slack-channel "${SLACK_CHANNEL}" --sdk-ref "${RELEASE_VERSION}" --artifacts-folder "${ARTIFACTS_FOLDER}" - - section_end "Trigger content-private nightly build" - -trigger-content-internal-dist-nightly: - stage: trigger-nightlies - extends: - - .release-sdk - - .default-job-settings - needs: - - create-release-branch - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - when: always - variables: - CONTENT_INTERNAL_DIST_TRIGGER_NIGHTLY: $CONTENT_INTERNAL_DIST_TRIGGER_NIGHTLY - SLACK_CHANNEL: $SLACK_CHANNEL - RELEASE_VERSION: $RELEASE_VERSION - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Trigger content-internal-dist nightly build" --collapsed - - export GOLD_PIPELINE_ID=$(./Utils/gitlab_triggers/trigger_content_internal_dist_nightly_build.sh -ct "${CONTENT_INTERNAL_DIST_TRIGGER_NIGHTLY}" -ch 
"${SLACK_CHANNEL}" -sdk "${RELEASE_VERSION}" | jq .id) - - echo "${GOLD_PIPELINE_ID}" > "${ARTIFACTS_FOLDER}/GOLD_PIPELINE_ID.txt" - - echo "content-internal-dist nightly build triggered successfully:" - - echo "${CI_SERVER_URL}/xdr/cortex-content/content-internal-dist/-/pipelines/${GOLD_PIPELINE_ID}" - - section_end "Trigger content-internal-dist nightly build" - -trigger-demisto-sdk-nightly: - stage: trigger-nightlies - extends: - - .release-sdk - - .default-job-settings - needs: - - create-release-branch - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - when: always - variables: - CI_TOKEN: $CI_TOKEN - SLACK_CHANNEL: $SLACK_CHANNEL - RELEASE_VERSION: $RELEASE_VERSION - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Trigger SDK nightly build" --collapsed - - export SDK_PIPELINE_ID=$(./Utils/trigger_nightly_sdk_build.sh -ct "${CI_TOKEN}" -ch "${SLACK_CHANNEL}" -sr "${RELEASE_VERSION}" -g | jq .id) - - echo $SDK_PIPELINE_ID > ${ARTIFACTS_FOLDER}/SDK_PIPELINE_ID.txt - - echo "demisto-sdk nightly build triggered successfully:" - - echo $CI_PROJECT_URL"/-/pipelines/"$SDK_PIPELINE_ID - - section_end "Trigger sdk nightly build" - -wait-for-content-private-nightly: - stage: wait-for-build-to-finish - extends: - - .release-sdk - - .default-job-settings - allow_failure: true - needs: - - trigger-content-private-nightly - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - timeout: 6 hours - variables: - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - GITHUB_TOKEN: $GITHUB_TOKEN - script: - - section_start "Wait for content-private nightly build" --collapsed - - python3 ./Utils/get_private_build_status.py --github-token "${GITHUB_TOKEN}" --artifacts-folder "${ARTIFACTS_FOLDER}" - - section_end "Wait for content-private nightly build" - -wait-for-content-internal-dist-nightly: - stage: wait-for-build-to-finish - extends: - - .release-sdk - - .default-job-settings - allow_failure: true - 
needs: - - trigger-content-internal-dist-nightly - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - timeout: 6 hours - variables: - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Wait for content-internal-dist nightly build" --collapsed - - GOLD_PIPELINE_ID=$(head -n 1 "${ARTIFACTS_FOLDER}/GOLD_PIPELINE_ID.txt") - - python3 ./Tests/sdk_release/wait_for_pipeline.py -g "${GITLAB_API_TOKEN}" -p "${GOLD_PIPELINE_ID}" -pid "${GOLD_PROJECT_ID}" - - section_end "Wait for content-internal-dist nightly build" - -wait-for-demisto-sdk-nightly: - stage: wait-for-build-to-finish - extends: - - .release-sdk - - .default-job-settings - allow_failure: true - needs: - - trigger-demisto-sdk-nightly - artifacts: - expire_in: 48 hrs - paths: - - ${CI_PROJECT_DIR}/artifacts/* - timeout: 6 hours - variables: - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Wait for SDK nightly build" --collapsed - - SDK_PIPELINE_ID=$(head -n 1 "${ARTIFACTS_FOLDER}/SDK_PIPELINE_ID.txt") - - python3 ./Tests/sdk_release/wait_for_pipeline.py -g "${GITLAB_API_TOKEN}" -p "${SDK_PIPELINE_ID}" -pid "${CI_PROJECT_ID}" - - section_end "Wait for SDK nightly build" - -slack-notify: - stage: wait-for-build-to-finish - extends: - - .release-sdk - - .default-job-settings - allow_failure: true - needs: - - wait-for-demisto-sdk-nightly - - wait-for-content-private-nightly - - wait-for-content-internal-dist-nightly - timeout: 6 hours - variables: - REVIEWER: $REVIEWER - SLACK_CHANNEL: $SLACK_CHANNEL - script: - - section_start "Slack notify" --collapsed - - python3 ./Tests/scripts/gitlab_basic_slack_notifier.py -s "${SLACK_TOKEN}" -t "All nightlies builds has been finished, please check their status and continue with the release" --allow-failure "${SLACK_ALLOW_FAILURE}" -gt "${GITLAB_API_TOKEN}" -gu "${REVIEWER}" -ch "${SLACK_CHANNEL}" - - section_end "Slack notify" - -create-release-pull-request: - stage: release-flow - extends: - - 
.release-sdk - - .default-job-settings - needs: - - slack-notify - when: manual - variables: - RELEASE_VERSION: $RELEASE_VERSION - GITHUB_TOKEN: $GITHUB_TOKEN - IS_DRAFT: $IS_DRAFT - REVIEWER: $REVIEWER - SLACK_CHANNEL: $SLACK_CHANNEL - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Create sdk release pr" --collapsed - - python3 ./Tests/sdk_release/create_sdk_pr.py -t "${GITHUB_TOKEN}" -b "${RELEASE_VERSION}" -d "${IS_DRAFT}" -ro "${REVIEWER}" --artifacts-folder "${ARTIFACTS_FOLDER}" - - python3 ./Tests/scripts/gitlab_basic_slack_notifier.py -s "${SLACK_TOKEN}" --allow-failure "${SLACK_ALLOW_FAILURE}" -gt "${GITLAB_API_TOKEN}" -gu "${REVIEWER}" -ch "${SLACK_CHANNEL}" -f "${ARTIFACTS_FOLDER}/SDK_PR_READY.txt" - - section_end "Create sdk release pr" - -deploy-sdk-to-pypi: - stage: release-flow - extends: - - .release-sdk - - .default-job-settings - needs: - - create-release-pull-request - when: manual - timeout: 6 hours - variables: - RELEASE_VERSION: $RELEASE_VERSION - GITHUB_TOKEN: $GITHUB_TOKEN - IS_DRAFT: $IS_DRAFT - REVIEWER: $REVIEWER - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - script: - - section_start "Deploy SDK release" --collapsed - - python3 ./Tests/sdk_release/create_release.py -t "${GITHUB_TOKEN}" -b "${RELEASE_VERSION}" -d "${IS_DRAFT}" - - python3 ./Tests/sdk_release/wait_for_release.py -b "${RELEASE_VERSION}" - - python3 ./Tests/sdk_release/create_content_pr.py -t "${GITHUB_TOKEN}" -b "${RELEASE_VERSION}" -r "${REVIEWER}" --artifacts-folder "${ARTIFACTS_FOLDER}" -d "${IS_DRAFT}" - - python3 ./Tests/scripts/gitlab_basic_slack_notifier.py -s "${SLACK_TOKEN}" -f "${ARTIFACTS_FOLDER}/SLACK_MERGE_PRS_REQUEST.txt" --allow-failure "${SLACK_ALLOW_FAILURE}" -ch "${SLACK_CHANNEL}" -gu "${REVIEWER}" -gt "${GITLAB_API_TOKEN}" - - section_end "Deploy SDK release" - -wait-for-prs: - stage: release-flow - extends: - - .release-sdk - - .default-job-settings - needs: - - deploy-sdk-to-pypi - timeout: 6 hours - variables: - 
RELEASE_VERSION: $RELEASE_VERSION - GITHUB_TOKEN: $GITHUB_TOKEN - IS_DRAFT: $IS_DRAFT - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts/" - SLACK_CHANNEL: $SLACK_CHANNEL - script: - - section_start "Wait for prs to be merged" --collapsed - - python3 ./Tests/sdk_release/wait_for_release_prs.py -t "${GITHUB_TOKEN}" -b "${RELEASE_VERSION}" --artifacts-folder "${ARTIFACTS_FOLDER}" - - python3 ./Tests/scripts/gitlab_basic_slack_notifier.py -s "${SLACK_TOKEN}" -f "${ARTIFACTS_FOLDER}/CHANGELOG_SLACK.txt" --allow-failure "${SLACK_ALLOW_FAILURE}" -ch "${SLACK_CHANNEL}" - - section_end "Wait for prs to be merged" \ No newline at end of file diff --git a/.gitlab/ci/.gitlab-ci.security-scans.yml b/.gitlab/ci/.gitlab-ci.security-scans.yml deleted file mode 100644 index 4585401aa24a..000000000000 --- a/.gitlab/ci/.gitlab-ci.security-scans.yml +++ /dev/null @@ -1,54 +0,0 @@ -.auto-secure-cicd-rule: - rules: - - if: '$SECURITY_SCANS == "true"' - -.auto-secure-cicd-rule-always: - rules: - - if: '$SECURITY_SCANS == "true"' - when: always - -stages: - - security - -auto secure cicd: - stage: security - variables: - PYTHONPATH: "/root/prodsec_tools/" - trigger: - include: - - file: "/.gitlab/ci/security-scans.yml" - ref: master - project: "${CI_PROJECT_NAMESPACE}/infra" - strategy: depend - extends: - .auto-secure-cicd-rule - -fan-in-security-scans: - tags: - - gke - stage: fan-in - extends: - - .auto-secure-cicd-rule-always - script: - - echo "fan in" - - -slack-notify-security-scans: - extends: - - .trigger-slack-notification - - .auto-secure-cicd-rule-always - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. 
- PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Security Scans' - JOB_NAME: 'fan-in-security-scans' - SLACK_CHANNEL: $SLACK_CHANNEL - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'false' - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME - diff --git a/.gitlab/ci/.gitlab-ci.slack-notify.yml b/.gitlab/ci/.gitlab-ci.slack-notify.yml deleted file mode 100644 index 468b2a1fc5f6..000000000000 --- a/.gitlab/ci/.gitlab-ci.slack-notify.yml +++ /dev/null @@ -1,29 +0,0 @@ -default: - image: ${DOCKER_IO}/devdemisto/gitlab-content-ci:1.0.0.64455 - artifacts: - expire_in: 30 days - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - -stages: - - notify - -include: - - local: .gitlab/ci/.gitlab-ci.variables.yml - - local: .gitlab/ci/.gitlab-ci.global.yml - -slack-notify: - tags: - - gke - stage: notify - extends: .default-job-settings - script: - - !reference [.download-demisto-conf] - - python3 ./Tests/scripts/gitlab_slack_notifier.py -p "${PIPELINE_TO_QUERY}" -s "${SLACK_TOKEN}" -c "${GITLAB_STATUS_TOKEN}" -ch "${SLACK_CHANNEL}" --triggering-workflow "${WORKFLOW}" --allow-failure "${SLACK_ALLOW_FAILURE}" --name-mapping_path "${CI_PROJECT_DIR}/name_mapping.json" - retry: - max: 2 - needs: - - pipeline: $PIPELINE_TO_QUERY - job: $JOB_NAME diff --git a/.gitlab/ci/.gitlab-ci.staging.yml b/.gitlab/ci/.gitlab-ci.staging.yml deleted file mode 100644 index dc74a0e46b09..000000000000 --- a/.gitlab/ci/.gitlab-ci.staging.yml +++ /dev/null @@ -1 +0,0 @@ -# this file is reserved for staging jobs \ No newline at end of file diff --git a/.gitlab/ci/.gitlab-ci.test-native-candidate.yml b/.gitlab/ci/.gitlab-ci.test-native-candidate.yml deleted file mode 100644 index b4074efe5163..000000000000 --- 
a/.gitlab/ci/.gitlab-ci.test-native-candidate.yml +++ /dev/null @@ -1,75 +0,0 @@ -.test-native-candidate-rule: - rules: - - if: '$CI_PIPELINE_SOURCE =~ /^(schedule|trigger)$/ && $DEMISTO_TEST_NATIVE_CANDIDATE == "true"' - -# used for jobs which we want to run in a pipeline even when previous jobs in the pipeline fail e.g. Slack notification -.test-native-candidate-rule-always: - rules: - - if: '$CI_PIPELINE_SOURCE =~ /^(schedule|trigger)$/ && $DEMISTO_TEST_NATIVE_CANDIDATE == "true"' - when: always - -test-native-candidate:run-pre-commit-with-native-candidate: - extends: - - .pre-commit-settings - - .test-native-candidate-rule - script: - - section_start "Versions" - - | - echo "demisto-sdk version: $(demisto-sdk --version)" - echo "mypy version: $(mypy --version)" - echo "flake8 py2 version: $(python2 -m flake8 --version)" - echo "flake8 py3 version: $(python3 -m flake8 --version)" - echo "bandit py2 version: $(python2 -m bandit --version 2>&1)" - echo "bandit py3 version: $(python3 -m bandit --version 2>&1)" - echo "vulture py2 version: $(python2 -m vulture --version 2>&1)" - echo "vulture py3 version: $(python3 -m vulture --version 2>&1)" - - section_end "Versions" - - section_start "Revoking GCP Auth and Configure Docker" - - gcloud auth revoke "${GCS_ARTIFACTS_ACCOUNT_NAME}" >> "${ARTIFACTS_FOLDER}/logs/gcloud_auth.log" 2>&1 - - gcloud auth configure-docker ${DOCKER_IO_DOMAIN} >> "${ARTIFACTS_FOLDER}/logs/configure_docker_with_registry.log" 2>&1 - - section_end "Revoking GCP Auth and Configure Docker" - - section_start "Run pre-commit with Native Candidate" - - | - mkdir ./unit-tests - if [[ "$NATIVE_CANDIDATE_IMAGE" == "latest" ]]; then - echo "----------Getting tag for latest native:dev----------" - NATIVE_CANDIDATE_IMAGE=$(python3 Tests/scripts/get_latest_docker_image.py -i "demisto/py3-native") - echo "Found latest image: $NATIVE_CANDIDATE_IMAGE" - fi - echo "----------Running lint on specific native candidate image $NATIVE_CANDIDATE_IMAGE----------" - 
touch $ARTIFACTS_FOLDER/native_candidate_image.txt - echo "$NATIVE_CANDIDATE_IMAGE" >> $ARTIFACTS_FOLDER/native_candidate_image.txt - demisto-sdk pre-commit -a --mode=nightly --docker-image native:candidate --image-ref "$NATIVE_CANDIDATE_IMAGE" - echo "Native candidate full image name was: $NATIVE_CANDIDATE_IMAGE" - - section_end "Run pre-commit with Native Candidate" - - job-done - - -test-native-candidate:fan-in: - tags: - - gke - stage: fan-in - extends: - - .test-native-candidate-rule-always - script: - - echo "fan in" - - -test-native-candidate:trigger-slack-notify: - extends: - - .trigger-slack-notification - - .test-native-candidate-rule-always - variables: # Passes the environment variable from the parent pipeline to the child which can be useful for cases when triggering pipeline with alternate env variable value passed in the API call. - PIPELINE_TO_QUERY: $CI_PIPELINE_ID - WORKFLOW: 'Test Native Candidate' - JOB_NAME: 'test-native-candidate:fan-in' - SLACK_CHANNEL: $SLACK_CHANNEL - SLACK_JOB: 'true' - SLACK_ALLOW_FAILURE: 'false' - CI_PROJECT_ID: $CI_PROJECT_ID - CI_SERVER_URL: $CI_SERVER_URL - JIRA_SERVER_URL: $JIRA_SERVER_URL - JIRA_VERIFY_SSL: $JIRA_VERIFY_SSL - JIRA_API_KEY: $JIRA_API_KEY - JIRA_PROJECT_ID: $JIRA_PROJECT_ID - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: $JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME diff --git a/.gitlab/ci/.gitlab-ci.trigger-contribution-build.yml b/.gitlab/ci/.gitlab-ci.trigger-contribution-build.yml deleted file mode 100644 index 4597afd3c986..000000000000 --- a/.gitlab/ci/.gitlab-ci.trigger-contribution-build.yml +++ /dev/null @@ -1,20 +0,0 @@ -.trigger-contribution-build-role: - rules: - - if: '$CI_PIPELINE_SOURCE =~ /^(schedule|trigger)$/ && $TRIGGER_CONTRIBUTION_BUILD == "true"' - -stages: - - trigger-contribution-build - -trigger-contribution-build: - stage: trigger-contribution-build - variables: - WORKFLOW: 'Trigger contribution build' - GITHUB_TOKEN: $GITHUB_TOKEN - GITLAB_API_TOKEN: $GITLAB_API_TOKEN - 
TRIGGER_CONTRIBUTION: $TRIGGER_CONTRIBUTION - extends: - - .trigger-contribution-build-role - - .default-job-settings - script: - - pip install PyGithub python-gitlab coloredlogs demisto_sdk - - python3 ./Utils/trigger_contribution_build.py --github-token "${GITHUB_TOKEN}" --gitlab-api-token "${GITLAB_API_TOKEN}" --gitlab-trigger-token "${TRIGGER_CONTRIBUTION}" \ No newline at end of file diff --git a/.gitlab/ci/.gitlab-ci.variables.yml b/.gitlab/ci/.gitlab-ci.variables.yml deleted file mode 100644 index f5e1db900d0e..000000000000 --- a/.gitlab/ci/.gitlab-ci.variables.yml +++ /dev/null @@ -1,51 +0,0 @@ -variables: - ARTIFACTORY_ENABLED: "true" - PIP_DISABLE_PIP_VERSION_CHECK: "1" - DONT_CACHE_LAST_RESPONSE: "true" - GCS_MARKET_BUCKET: "marketplace-dist" - GCS_MARKET_V2_BUCKET: "marketplace-v2-dist" - GCS_MARKET_XPANSE_BUCKET: "xpanse-dist" - GCS_MARKET_XSOAR_SAAS_BUCKET: "marketplace-saas-dist" - GCS_MARKET_BUCKET_DEV: "marketplace-dist-dev" - GCS_MARKET_V2_BUCKET_DEV: "marketplace-v2-dist-dev" - GCS_MARKET_XSOAR_SAAS_BUCKET_DEV: "marketplace-saas-dist-dev" - STORAGE_BASE_PATH: "" - SLACK_CHANNEL: "dmst-build-test" - DEMISTO_README_VALIDATION: "true" - ARTIFACTS_FOLDER: "${CI_PROJECT_DIR}/artifacts" - PIPELINE_JOBS_FOLDER: "${CI_PROJECT_DIR}/pipeline_jobs_folder" - ARTIFACTS_FOLDER_XSOAR: "${CI_PROJECT_DIR}/artifacts/xsoar" # Used for both XSOAR on-prem and XSOAR SaaS - ARTIFACTS_FOLDER_MPV2: "${CI_PROJECT_DIR}/artifacts/marketplacev2" - ARTIFACTS_FOLDER_XPANSE: "${CI_PROJECT_DIR}/artifacts/xpanse" - JIRA_ISSUE_UNRESOLVED_TRANSITION_NAME: "Backlog" - JIRA_PROJECT_ID: "CIAC" - JIRA_VERIFY_SSL: "True" - # Jira additional fields are a json string that will be parsed into a dictionary containing the name of the field as the key and the value as a dictionary containing the value of the field. 
- JIRA_ADDITIONAL_FIELDS: '{"customfield_19581":{"value": "IN"},"customfield_19582":{"value": "FALSE"},"customfield_19583":{"value": "Single"}}' - BASH_ENV: "${CI_PROJECT_DIR}/artifacts/bash_env" - PYTHONPATH: "${CI_PROJECT_DIR}" - PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip" - PRE_COMMIT_HOME: "${CI_PROJECT_DIR}/.cache/pre-commit" - FEATURE_BRANCH_NAME: "v4.5.0" - IFRA_ENV_TYPE: "Content-Env" - GET_SOURCES_ATTEMPTS: 3 # see https://docs.gitlab.com/ee/ci/runners/configure_runners.html#job-stages-attempts - GCS_PRODUCTION_BUCKET: "marketplace-dist" - GCS_PRODUCTION_V2_BUCKET: "marketplace-v2-dist" - GCS_PRODUCTION_XPANSE_BUCKET: "xpanse-dist" - GCS_PRODUCTION_XSOAR_SAAS_BUCKET: "marketplace-saas-dist" - DEMISTO_CONNECTION_POOL_MAXSIZE: "180" # see this issue for more info https://github.com/demisto/etc/issues/36886 - SDK_REF: "master" # The default sdk branch to use. - OVERRIDE_SDK_REF: "" # Whether to override the sdk branch. - OVERRIDE_ALL_PACKS: "false" - TEST_UPLOAD: "true" - NATIVE_CANDIDATE_IMAGE: "latest" - DEMISTO_SDK_LOG_FILE_PATH: "${ARTIFACTS_FOLDER}/logs" - CONTENT_GITLAB_CI: "true" - POETRY_VIRTUALENVS_OPTIONS_ALWAYS_COPY: "true" - FF_USE_FASTZIP: "true" - POETRY_VERSION: "1.8.2" - INSTALL_POETRY: "true" - DOCKER_IO: "docker.io" # defined in the project level CI/CD variables - KUBERNETES_CPU_REQUEST: 2 - KUBERNETES_MEMORY_REQUEST: 2Gi - RESET_CORE_PACK_VERSION: "true" diff --git a/.gitlab/ci/.gitlab-ci.yml b/.gitlab/ci/.gitlab-ci.yml index 2fa8461dd3a7..e555781ee8b1 100644 --- a/.gitlab/ci/.gitlab-ci.yml +++ b/.gitlab/ci/.gitlab-ci.yml @@ -1,38 +1,7 @@ - -default: - image: ${DOCKER_IO}/devdemisto/gitlab-content-ci:1.0.0.64455 - artifacts: - expire_in: 30 days - paths: - - ${CI_PROJECT_DIR}/artifacts/* - - ${CI_PROJECT_DIR}/pipeline_jobs_folder/* - when: always - -stages: - - cleanup - - security - - unittests-and-validations - - prepare-testing-bucket - - run-instances - - results - - upload-to-marketplace - - create-release-branch - - trigger-nightlies 
- - wait-for-build-to-finish - - release-flow - - are-jobs-really-done - - fan-in # concentrate pipeline artifacts to single job before triggering child slack pipeline - - trigger-contribution-build +variables: + CURRENT_BRANCH_NAME: $INFRA_BRANCH include: - - local: .gitlab/ci/.gitlab-ci.variables.yml - - local: .gitlab/ci/.gitlab-ci.global.yml - - local: .gitlab/ci/.gitlab-ci.on-push.yml - - local: .gitlab/ci/.gitlab-ci.bucket-upload.yml - - local: .gitlab/ci/.gitlab-ci.sdk-nightly.yml - - local: .gitlab/ci/.gitlab-ci.miscellaneous.yml - - local: .gitlab/ci/.gitlab-ci.test-native-candidate.yml - - local: .gitlab/ci/.gitlab-ci.security-scans.yml - - local: .gitlab/ci/.gitlab-ci.build-machines-cleanup.yml - - local: .gitlab/ci/.gitlab-ci.sdk-release.yml - - local: .gitlab/ci/.gitlab-ci.trigger-contribution-build.yml + - file: "/.gitlab/ci/content-ci/ci/.gitlab-ci.yml" + ref: $INFRA_BRANCH + project: "${CI_PROJECT_NAMESPACE}/infra" diff --git a/.gitlab/helper_functions.sh b/.gitlab/helper_functions.sh index b390d69a19a6..14fa385de567 100644 --- a/.gitlab/helper_functions.sh +++ b/.gitlab/helper_functions.sh @@ -75,3 +75,4 @@ sleep-with-progress() { local sleep_step=$((sleep_time / sleep_interval)) for ((i=0; i< sleep_step;i++)); do echo "${sleep_interval}";sleep "${sleep_interval}"; done | tqdm --total ${sleep_time} --unit seconds --leave --update --colour green -ncols ${columns} --desc "${sleep_message}" 1> /dev/null } + diff --git a/.hooks/pre-commit b/.hooks/pre-commit old mode 100644 new mode 100755 diff --git a/.pre-commit-config_template.yaml b/.pre-commit-config_template.yaml index 221b8076ea96..12c0909dc322 100644 --- a/.pre-commit-config_template.yaml +++ b/.pre-commit-config_template.yaml @@ -19,17 +19,9 @@ repos: rev: 1.8.2 hooks: - id: poetry-check - - id: poetry-lock args: - - --check + - --lock files: ^pyproject.toml$ - - id: poetry-export - args: - - --with - - dev,typing - - --without-hashes - - -o - - requirements.txt - repo: 
https://github.com/hadialqattan/pycln rev: v2.1.2 hooks: @@ -138,7 +130,7 @@ repos: - --color=yes - --files copy_files: - - Tests/scripts/dev_envs/pytest/conftest.py + - Tests/scripts/pytest/conftest.py skip:commit: true pass_docker_extra_args:ci: --rm=false --network=none pass_docker_extra_args:nightly: --rm=false --network=none @@ -168,7 +160,7 @@ repos: - --junitxml=/src/.pre-commit/pytest-junit/.report_pytest.xml - --color=yes copy_files: - - Tests/scripts/dev_envs/pytest/conftest.py + - Tests/scripts/pytest/conftest.py skip:commit: true run_isolated: true pass_docker_extra_args:ci: --rm=false @@ -253,30 +245,13 @@ repos: pass_docker_extra_args:nightly: --rm=false --network=none pass_docker_extra_args: --network=none - - id: is-circle-changed - name: is-circle-changed - description: Checks if circle files are changed, and checkout to current if it is. - entry: ./Tests/scripts/is_file_up_to_date.sh .circleci/config.yml "" false - language: script - skip:nightly: true - pass_filenames: false - skip:commit: true - - - id: is-gitlab-changed - name: is-gitlab-changed - description: Checks if gitlab files are changed, and checkout to current if it is. - entry: ./Tests/scripts/is_file_up_to_date.sh .gitlab "" false - language: script - pass_filenames: false - skip:commit: true - skip:nightly: true - - id: validate name: validate description: validate content entry: demisto-sdk validate args: - --skip-pack-dependencies + - --config-path=validation_config.toml pass_filenames: false language: system require_serial: true @@ -315,32 +290,32 @@ repos: needs: - pytest-in-docker - - id: coverage-pytest-analyze - name: coverage-pytest-analyze - entry: demisto-sdk coverage-analyze - description: Running demisto-sdk coverage-analyze and showing a coverage report. 
- language: system - pass_filenames: false - args: - - -i - - .coverage - - --report-dir - - coverage_report - - --report-type - - all - - --previous-coverage-report-url - - https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json - args:nightly: - - -i - - .coverage - - --report-dir - - coverage_report - - --report-type - - all - - --allowed-coverage-degradation-percentage - - '100' - needs: - - pytest-in-docker + # - id: coverage-pytest-analyze + # name: coverage-pytest-analyze + # entry: demisto-sdk coverage-analyze + # description: Running demisto-sdk coverage-analyze and showing a coverage report. + # language: system + # pass_filenames: false + # args: + # - -i + # - .coverage + # - --report-dir + # - coverage_report + # - --report-type + # - all + # - --previous-coverage-report-url + # - https://storage.googleapis.com/marketplace-dist-dev/code-coverage-reports/coverage-min.json + # args:nightly: + # - -i + # - .coverage + # - --report-dir + # - coverage_report + # - --report-type + # - all + # - --allowed-coverage-degradation-percentage + # - '100' + # needs: + # - pytest-in-docker - repo: https://github.com/sourcery-ai/sourcery rev: v1.6.0 hooks: diff --git a/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md b/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md index 2149882f4bc1..d6c3dad0fee4 100644 --- a/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md +++ b/Packs/AMP/Integrations/CiscoAMPEventCollector/README.md @@ -1,6 +1,10 @@ This is the Cisco AMP event collector integration for Cortex XSIAM. This integration was integrated and tested with version v1 of CiscoAMPEventCollector. +<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure Cisco AMP Event Collector on Cortex XSIAM 1. Navigate to **Settings** > **Integrations** > **Servers & Services**. 
diff --git a/Packs/AMP/pack_metadata.json b/Packs/AMP/pack_metadata.json index 83e5834aff03..be21f34b192f 100644 --- a/Packs/AMP/pack_metadata.json +++ b/Packs/AMP/pack_metadata.json @@ -16,5 +16,6 @@ "marketplaces": [ "xsoar", "marketplacev2" - ] + ], + "defaultDataSource": "CiscoAMPEventCollector" } \ No newline at end of file diff --git a/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/command_example.txt b/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/command_example.txt index 6d825c4e346e..a0ac876f9a8e 100644 --- a/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/command_example.txt +++ b/Packs/AWS-AccessAnalyzer/Integrations/AWS-AccessAnalyzer/command_example.txt @@ -1,6 +1,6 @@ !aws-access-analyzer-list-analyzers -!aws-access-analyzer-list-findings analyzerArn=arn:aws:access-analyzer:us-east-1:120785635586:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 -!aws-access-analyzer-list-analyzed-resource analyzerArn=arn:aws:access-analyzer:us-east-1:120785635586:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 -!aws-access-analyzer-get-analyzed-resource analyzerArn=arn:aws:access-analyzer:us-east-1:120785635586:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 resourceArn=arn:aws:iam::120785635586:role/-TestRole -!aws-access-analyzer-get-finding analyzerArn=arn:aws:access-analyzer:us-east-1:120785635586:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 findingId=03c44171-223c-4615-be8a-bf0b626f0b13 -!aws-access-analyzer-start-resource-scan analyzerArn=arn:aws:access-analyzer:us-east-1:120785635586:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 resourceArn=arn:aws:iam::120785635586:role/billing-bot-role \ No newline at end of file +!aws-access-analyzer-list-findings analyzerArn=arn:aws:access-analyzer:us-east-1:123456789012:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 +!aws-access-analyzer-list-analyzed-resource 
analyzerArn=arn:aws:access-analyzer:us-east-1:123456789012:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 +!aws-access-analyzer-get-analyzed-resource analyzerArn=arn:aws:access-analyzer:us-east-1:123456789012:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 resourceArn=arn:aws:iam::123456789012:role/-TestRole +!aws-access-analyzer-get-finding analyzerArn=arn:aws:access-analyzer:us-east-1:123456789012:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 findingId=03c44171-223c-4615-be8a-bf0b626f0b13 +!aws-access-analyzer-start-resource-scan analyzerArn=arn:aws:access-analyzer:us-east-1:123456789012:analyzer/ConsoleAnalyzer-fc3b189d-f88a-48a5-9c2b-f42f9187c898 resourceArn=arn:aws:iam::123456789012:role/billing-bot-role \ No newline at end of file diff --git a/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_context/get_query_results_command.json b/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_context/get_query_results_command.json index fd468e25eb31..e554232bc1f7 100644 --- a/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_context/get_query_results_command.json +++ b/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_context/get_query_results_command.json @@ -7,7 +7,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, 
account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -20,9 +20,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 
15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -34,7 +34,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -47,9 +47,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, 
additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2347}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": 
"b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -61,7 +61,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -74,9 +74,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "unmapped": 
"{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -88,7 +88,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": 
"[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -101,9 +101,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, 
eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1317}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231104", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -115,7 +115,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API 
Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -128,9 +128,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, 
requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1199}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231104", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -142,7 +142,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -155,9 +155,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, 
additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, 
additionalEventData.bytesTransferredIn=1556}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231104", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -169,7 +169,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -182,9 +182,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, 
requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1580}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -196,7 +196,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, 
account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -209,9 +209,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, 
requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2276}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -223,7 +223,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": 
"[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -236,9 +236,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, 
eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -250,7 +250,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -263,9 +263,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, 
additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=960}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" } diff --git a/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_hr/get_query_results_command.txt 
b/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_hr/get_query_results_command.txt index dadfbb63bbcd..5dbbc3075aa7 100644 --- a/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_hr/get_query_results_command.txt +++ b/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/expected_hr/get_query_results_command.txt @@ -1,13 +1,13 @@ ### AWS Athena Query Results |accountid|activity_id|activity_name|actor|api|category_name|category_uid|class_name|class_uid|cloud|eventday|http_request|metadata|query_execution_id|region|resources|severity|severity_id|src_endpoint|status|status_id|time|type_name|type_uid|unmapped| |---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---| -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342808000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, 
responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342833000 | API Activity: Update | 300503 | 
{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, 
ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342772000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, 
type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137065000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2} | 
b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137170000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}} | Audit Activity | 3 | API 
Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137247000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, 
session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342905000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, 
responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342908000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342917000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, 
additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342948000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, 
additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342808000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, 
additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342833000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, 
additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2347} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342772000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, 
additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, 
domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137065000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1317} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | 
[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137170000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1199} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} 
| 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137247000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1556} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, 
invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342905000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, 
recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1580} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342908000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, 
requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2276} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342917000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342948000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=960} | diff --git a/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/raw_data_mock/get_query_results.json b/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/raw_data_mock/get_query_results.json index 25b9d0eb82d4..40b02f2f4452 100644 --- a/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/raw_data_mock/get_query_results.json +++ b/Packs/AWS-Athena/Integrations/AWS-Athena/test_data/raw_data_mock/get_query_results.json @@ -112,7 +112,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -153,13 +153,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -191,7 +191,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, 
type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -232,13 +232,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, 
requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2347}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -270,7 +270,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -311,13 +311,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, 
requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -349,7 +349,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": 
"[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -390,13 +390,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, 
requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1317}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231104" @@ -428,7 +428,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -469,13 +469,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, 
readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1199}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231104" @@ -507,7 +507,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - 
"VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -548,13 +548,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, 
requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1556}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231104" @@ -586,7 +586,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, 
type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -627,13 +627,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, 
requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1580}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -665,7 +665,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -706,13 +706,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, 
requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2276}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -744,7 +744,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, 
{uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -785,13 +785,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, 
requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -823,7 +823,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -864,13 +864,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, 
additionalEventData.bytesTransferredIn=960}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=960}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" diff --git a/Packs/AWS-EC2/Integrations/AWS-EC2/AWS-EC2_test.py b/Packs/AWS-EC2/Integrations/AWS-EC2/AWS-EC2_test.py index af7dfbb19c9c..242f97ae0a72 100644 --- a/Packs/AWS-EC2/Integrations/AWS-EC2/AWS-EC2_test.py +++ b/Packs/AWS-EC2/Integrations/AWS-EC2/AWS-EC2_test.py @@ -130,8 +130,8 @@ def test_aws_ec2_authorize_security_group_egress_rule(mocker): @pytest.mark.parametrize('filter, expected_results', [ - ("Name=iam-instance-profile.arn,Values=arn:aws:iam::664798938958:instance-profile/AmazonEKSNodeRole", - [{'Name': 'iam-instance-profile.arn', 'Values': ['arn:aws:iam::664798938958:instance-profile/AmazonEKSNodeRole']}]) + ("Name=iam-instance-profile.arn,Values=arn:aws:iam::123456789012:instance-profile/AmazonEKSNodeRole", + [{'Name': 'iam-instance-profile.arn', 'Values': 
['arn:aws:iam::123456789012:instance-profile/AmazonEKSNodeRole']}]) ]) def test_parse_filter_field(filter, expected_results): """ diff --git a/Packs/AWS-EC2/ReleaseNotes/1_4_7.md b/Packs/AWS-EC2/ReleaseNotes/1_4_7.md new file mode 100644 index 000000000000..97148ac2fd0f --- /dev/null +++ b/Packs/AWS-EC2/ReleaseNotes/1_4_7.md @@ -0,0 +1,3 @@ +## AWS - EC2 + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. diff --git a/Packs/AWS-EC2/TestPlaybooks/playbook-AWS-EC2_test.yml b/Packs/AWS-EC2/TestPlaybooks/playbook-AWS-EC2_test.yml index 50df59307d81..29498e4a078e 100644 --- a/Packs/AWS-EC2/TestPlaybooks/playbook-AWS-EC2_test.yml +++ b/Packs/AWS-EC2/TestPlaybooks/playbook-AWS-EC2_test.yml @@ -1672,7 +1672,7 @@ tasks: LaunchTemplateName: simple: test roleArn: - simple: arn:aws:iam::794065701450:role/EC2_SG_API_CRTX-91813_AssumeRole + simple: arn:aws:iam::123456789012:role/EC2_SG_API_CRTX-91813_AssumeRole roleSessionDuration: simple: "1000" roleSessionName: diff --git a/Packs/AWS-EC2/pack_metadata.json b/Packs/AWS-EC2/pack_metadata.json index 8c9d3d741a47..d3b5ad596722 100644 --- a/Packs/AWS-EC2/pack_metadata.json +++ b/Packs/AWS-EC2/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AWS - EC2", "description": "Amazon Web Services Elastic Compute Cloud (EC2)", "support": "xsoar", - "currentVersion": "1.4.6", + "currentVersion": "1.4.7", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md b/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md index bb10ba49f8e6..c7a6bdf2e751 100644 --- a/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md +++ b/Packs/AWS-GuardDuty/Integrations/AWSGuardDutyEventCollector/README.md @@ -1,5 +1,9 @@ Amazon Web Services Guard Duty Service Event Collector integration for Cortex XSIAM. 
+<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure AWS - GuardDuty Event Collector on Cortex XSOAR 1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automation and Feed Integrations**. diff --git a/Packs/AWS-GuardDuty/pack_metadata.json b/Packs/AWS-GuardDuty/pack_metadata.json index fd720fa60928..730833aeff18 100644 --- a/Packs/AWS-GuardDuty/pack_metadata.json +++ b/Packs/AWS-GuardDuty/pack_metadata.json @@ -20,5 +20,6 @@ "marketplaces": [ "xsoar", "marketplacev2" - ] + ], + "defaultDataSource": "AWS - GuardDuty Event Collector" } \ No newline at end of file diff --git a/Packs/AWS-IAM/ReleaseNotes/1_1_61.md b/Packs/AWS-IAM/ReleaseNotes/1_1_61.md new file mode 100644 index 000000000000..2579c4c26952 --- /dev/null +++ b/Packs/AWS-IAM/ReleaseNotes/1_1_61.md @@ -0,0 +1,3 @@ +## AWS - IAM + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. diff --git a/Packs/AWS-IAM/pack_metadata.json b/Packs/AWS-IAM/pack_metadata.json index ab8b30f1e519..7ed76c68c188 100644 --- a/Packs/AWS-IAM/pack_metadata.json +++ b/Packs/AWS-IAM/pack_metadata.json @@ -3,7 +3,7 @@ "description": "Amazon Web Services Identity and Access Management (IAM)", "support": "xsoar", "author": "Cortex XSOAR", - "currentVersion": "1.1.60", + "currentVersion": "1.1.61", "url": "https://www.paloaltonetworks.com/cortex", "email": "", "created": "2020-04-14T00:00:00Z", diff --git a/Packs/AWS-IAMIdentityCenter/.secrets-ignore b/Packs/AWS-IAMIdentityCenter/.secrets-ignore index e69de29bb2d1..efab43fb9106 100644 --- a/Packs/AWS-IAMIdentityCenter/.secrets-ignore +++ b/Packs/AWS-IAMIdentityCenter/.secrets-ignore @@ -0,0 +1,2 @@ +test@example.com +johnDoe@gmail.com \ No newline at end of file diff --git a/Packs/AWS-IAMIdentityCenter/CONTRIBUTORS.json b/Packs/AWS-IAMIdentityCenter/CONTRIBUTORS.json new file mode 100644 index 000000000000..7c99f50dd6e5 --- /dev/null +++ 
b/Packs/AWS-IAMIdentityCenter/CONTRIBUTORS.json @@ -0,0 +1 @@ +["Sameh El-Hakim"] \ No newline at end of file diff --git a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.py b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.py index 8c3c38193367..456638b75f56 100644 --- a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.py +++ b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.py @@ -2,289 +2,816 @@ from CommonServerPython import * # noqa: F401 from AWSApiModule import * -import urllib3.util -# Disable insecure warnings -urllib3.disable_warnings() - -param = demisto.params() +''' CONSTANTS ''' SERVICE = 'identitystore' -IDENTITYSTOREID = param.get('IdentityStoreId') - - -def create_user(args, client): # pragma: no cover - username = demisto.getArg('userName') - familyName = demisto.getArg('familyName') - givenName = demisto.getArg('givenName') - userEmail = demisto.getArg('userEmailAddress') - userDisplayName = demisto.getArg('displayName') - - response = client.create_user( - IdentityStoreId=f'{IDENTITYSTOREID}', - UserName=f'{username}', - Name={ - 'FamilyName': f'{familyName}', - 'GivenName': f'{givenName}' +PREFIX = 'AWS.IAMIdentityCenter' +PREFIX_USER = 'AWS.IAMIdentityCenter.User' +PREFIX_GROUP = 'AWS.IAMIdentityCenter.Group' + +''' HELPER FUNCTIONS ''' + + +def get_userId_by_username(args: dict, client: Any, IdentityStoreId: str) -> str: + """ + Retrieve the User ID associated with a given username from the AWS IAM Identity Center using the provided client. + + Args: + args: The command arguments containing the 'userName'. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the user is registered. + + Returns: + str: The User ID associated with the provided username, or None if not found. 
+ """ + user_name = args.get('userName') + response_id = client.get_user_id( + IdentityStoreId=IdentityStoreId, + AlternateIdentifier={ + 'UniqueAttribute': { + 'AttributePath': 'userName', + 'AttributeValue': user_name + } + } + ) + return response_id.get('UserId') + + +def get_user_operations_list(args: dict) -> list: + """ + Generates a list of operations to update user information based on the provided arguments. + + Args: + args: A dictionary containing user information. + + Returns: + list: A list of dictionaries representing the operations to be performed on user attributes. + Each dictionary contains 'AttributePath' and 'AttributeValue' corresponding to the + attribute path and its updated value respectively. + + Raises: + RuntimeError: If 'userEmailAddressPrimary' is specified without 'userEmailAddress'. + """ + primary = args.get('userEmailAddressPrimary') + user_email_address = args.get('userEmailAddress') + if primary and not user_email_address: + return_error('Error: When specifying userEmailAddressPrimary, userEmailAddress must also be provided.') + if primary: + primary = argToBoolean(primary) + path_and_value = { + 'name.familyName': args.get('familyName'), + 'name.givenName': args.get('givenName'), + 'emails': [{ + 'value': user_email_address, + 'primary': primary + }], + 'displayName': args.get('displayName'), + 'userType': args.get('userType'), + 'profileUrl': args.get('profileUrl'), + 'title': args.get('title'), + 'addresses': [{ + 'region': args.get('region') + }] + } + path_and_value = remove_empty_elements(path_and_value) + to_update = [] + for path in path_and_value: + to_update.append({ + 'AttributePath': path, + 'AttributeValue': path_and_value[path] + }) + + return to_update + + +def get_limit(args: dict) -> int: + """ + Get the limit value specified in the arguments. + + Args: + args: A dictionary containing the 'limit' argument. + + Returns: + int: The limit value if specified and less than 50, otherwise returns 50 as the default limit. 
+ """ + if limit := args.get('limit'): + return min(int(limit), 50) + + return 50 + + +def get_groupId_by_displayName(args: dict, client: Any, IdentityStoreId: str) -> str: + """ + Retrieve the Group ID associated with a given display name or group name + from the AWS IAM Identity Center using the provided client. + + Args: + args: A dictionary containing the display name or group name to search for. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the group is registered. + + Returns: + str: The Group ID associated with the provided display name or group name. + """ + group_name = args.get('displayName') or args.get('groupName') + response_id = client.get_group_id( + IdentityStoreId=IdentityStoreId, + AlternateIdentifier={ + 'UniqueAttribute': { + 'AttributePath': 'displayName', + 'AttributeValue': group_name + } + } + ) + return response_id.get('GroupId') + + +def get_group_memberships_for_member(args: dict, client: Any, IdentityStoreId: str) -> list: + """ + Retrieve group memberships for a member (user) from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the member (user). + client: The client object used to interact with the IAM Identity Center service. + IdentityStoreId: The ID of the Identity Store where the member is registered. + + Returns: + list: A list containing the membership IDs of groups to which the member belongs. 
+ """ + memberships_of_member = [] + user_id = get_userId_by_username(args, client, IdentityStoreId) + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'MemberId': { + 'UserId': user_id + } + } + kwargs = remove_empty_elements(kwargs) + groups_response = client.list_group_memberships_for_member(**kwargs) + for group in groups_response.get('GroupMemberships', []): + memberships_of_member.append(group.get('MembershipId')) + + return memberships_of_member + + +def update_groups_and_memberships(last_data, current_data: list, key: str, id_value: Any, new_data: str): + """ + Update groups and memberships based on the provided parameters. + + Args: + last_data: The previous data containing group memberships. + current_data: The current data containing group memberships. + key: The key to identify the item in the data (e.g., 'id'). + id_value: The value of the key to match against (e.g., user ID). + new_data: The key representing the updated data (e.g., 'groups'). + + Returns: + list: The updated list of group memberships. + """ + updated_list = [] + if not isinstance(last_data, list): + last_data = [last_data] + + for item_data in last_data: + if item_data.get(key) == id_value: + updated_list = item_data.get(new_data, []) + break + + if updated_list: + combined_data = updated_list + [g for g in current_data if g not in updated_list] + final_data = combined_data + else: + final_data = current_data + + return final_data + + +def create_user(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Creates a user with the provided arguments. + + Args: + args: The command arguments + client: The client object to interact with the API. + IdentityStoreId: The ID of the identity store. 
+ """ + user_name = args.get('userName') + family_name = args.get('familyName') + given_name = args.get('givenName') + user_email = args.get('userEmailAddress') + user_display_name = args.get('displayName') + user_type = args.get('userType') + profile_url = args.get('profileUrl') + title = args.get('title') + region = args.get('region') + primary_email = args.get('userEmailAddressPrimary') + if primary_email and not user_email: + return_error('Error: When specifying userEmailAddressPrimary, userEmailAddress must also be provided.') + if primary_email: + primary_email = argToBoolean(primary_email) + + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'UserName': user_name, + 'Name': { + 'FamilyName': family_name, + 'GivenName': given_name }, - Emails=[ + 'Emails': [ { - 'Value': f'{userEmail}', - 'Type': 'work', - 'Primary': True - }, + 'Value': user_email, + 'Primary': primary_email + } ], - DisplayName=f'{userDisplayName}' - ) - ec = {'AWS.IAMIdentityCenter.Users': response} - human_readable = tableToMarkdown('AWS IAM Identity Center Users', response) - return_outputs(human_readable, ec) - - -def get_user(args, client): # pragma: no cover - data = [] - userName = demisto.getArg('userName') - response = client.list_users( - IdentityStoreId=f'{IDENTITYSTOREID}', - Filters=[ + 'DisplayName': user_display_name, + 'UserType': user_type, + 'ProfileUrl': profile_url, + 'Title': title, + 'Addresses': [ { - 'AttributePath': 'UserName', - 'AttributeValue': f'{userName}' - }, + 'Region': region, + } ] + } + kwargs = remove_empty_elements(kwargs) + response = client.create_user(**kwargs) + user_id = response.get('UserId') + response.pop('ResponseMetadata', None) + response = remove_empty_elements(response) + human_readable = tableToMarkdown(f'User {user_name} has been successfully created with user id {user_id}', response) + result = CommandResults( + outputs_prefix=PREFIX_USER, + readable_output=human_readable, + outputs=response + ) + return_results(result) + + +def 
update_user(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Update user information based on the provided arguments. + + Args: + args: A dictionary containing user information to be updated. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the user is registered. + """ + user_name = args.get('userName') + user_id = get_userId_by_username(args, client, IdentityStoreId) + operations = get_user_operations_list(args) + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'UserId': user_id, + 'Operations': operations + } + client.update_user(**kwargs) + hr_data = f'User {user_name} has been successfully updated' + result = CommandResults( + readable_output=hr_data + ) + return_results(result) + + +def delete_user(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Delete a user from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the user to be deleted. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the user is registered. 
+ """ + user_id = get_userId_by_username(args, client, IdentityStoreId) + client.delete_user( + IdentityStoreId=IdentityStoreId, + UserId=user_id ) - rep = json.dumps(response) - repJSON = json.loads(rep) - datas = repJSON.get('Users', []) - for da in datas: - for user in response['Users']: - user_details = { - 'UserName': user['UserName'], - 'UserId': user['UserId'], - 'Email': user['Emails'][0]['Value'], - 'DisplayName': user['DisplayName'] - } - userID = user['UserId'] - data.append(user_details) - ec = {'AWS.IAM.IdentityCenter.Users': data} - human_readable = tableToMarkdown('AWS IAM Users', data, removeNull=True) - return_outputs(human_readable, ec) - return userID - -def get_user_by_email(args, client): # pragma: no cover - data = [] - emailArg = demisto.getArg('emailAddress') - response = client.list_users( - IdentityStoreId=f'{IDENTITYSTOREID}', + hr_data = f'The User {user_id} has been removed.' + demisto.debug(hr_data) + result = CommandResults( + readable_output=hr_data ) - rep = json.dumps(response) - repJSON = json.loads(rep) - datas = repJSON.get('Users', []) - for da in datas: - for user in response['Users']: - userEmail = user['Emails'][0]['Value'] - if userEmail == emailArg: - user_details = { - 'UserName': user['UserName'], - 'UserId': user['UserId'], - 'Email': user['Emails'][0]['Value'], - 'DisplayName': user['DisplayName'] - } - userID = user['UserId'] - data.append(user_details) - ec = {'AWS.IAM.IdentityCenter.Users': data[0]} - human_readable = tableToMarkdown('AWS IAM Users ', data[0], removeNull=True) - return_outputs(human_readable, ec) - return userID - - -def list_users(args, client): # pragma: no cover - data = [] - response = client.list_users( - IdentityStoreId=f'{IDENTITYSTOREID}', + return_results(result) + + +def get_user(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Retrieve user information from the AWS IAM Identity Center based on the provided arguments. 
+ + Args: + args: A dictionary containing information required to identify the user to be retrieved. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the user is registered. + """ + response_id = get_userId_by_username(args, client, IdentityStoreId) + response = client.describe_user( + IdentityStoreId=IdentityStoreId, + UserId=response_id ) - rep = json.dumps(response) - repJSON = json.loads(rep) - datas = repJSON.get('Users', []) - for da in datas: - for user in response['Users']: - user_details = { - 'UserName': user['UserName'], - 'UserId': user['UserId'] - } - data.append(user_details) - ec = {'AWS.IAM.IdentityCenter.Users': data} - human_readable = tableToMarkdown('AWS IAM Identity Center Users', data, removeNull=True) - return_outputs(human_readable, ec) + response.pop('ResponseMetadata', None) + hr_data = { + 'UserId': response.get('UserId'), + 'UserName': response.get('UserName'), + 'DisplayName': response.get('DisplayName'), + } + if response.get('Emails'): + emails = [email.get('Value') for email in response.get('Emails')] + hr_data['Emails'] = emails + + human_readable = tableToMarkdown('AWS IAM Identity Center Users', hr_data, removeNull=True) + result = CommandResults( + outputs_prefix=PREFIX_USER, + readable_output=human_readable, + outputs_key_field='UserId', + outputs=response + ) + return_results(result) -def list_groups(args, client): # pragma: no cover - data = [] - response = client.list_groups( - IdentityStoreId=f'{IDENTITYSTOREID}', +def get_user_by_email(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Retrieve user information from the AWS IAM Identity Center based on the provided email address. + + Args: + args: A dictionary containing the email address of the user to be retrieved. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the user is registered. 
+ """ + email_arg = args.get('emailAddress') + response = client.list_users( + IdentityStoreId=IdentityStoreId, ) - rep = json.dumps(response) - repJSON = json.loads(rep) - datas = repJSON.get('Groups', []) - for da in datas: - for group in response['Groups']: - group_details = { - 'DisplayName': group['DisplayName'], - 'GroupId': group['GroupId'] + email_exists = False + hr_data = {} + context_data = {} + for user in response.get('Users'): + user_emails = user.get('Emails', []) + user_emails_values = [email.get('Value') for email in user_emails] + if email_arg in user_emails_values: + email_exists = True + user_details = { + 'UserName': user.get('UserName'), + 'UserId': user.get('UserId'), + 'Emails': user_emails_values, + 'DisplayName': user.get('DisplayName') } - data.append(group_details) - ec = {'AWS.IAM.IdentityCenter.Groups': data} - human_readable = tableToMarkdown('AWS IAM Identity Center Groups', data) - return_outputs(human_readable, ec) - - -def get_group(args, client): # pragma: no cover - data = [] - groupName = demisto.getArg('groupName') - response = client.list_groups( - IdentityStoreId=f'{IDENTITYSTOREID}', - Filters=[ - { - 'AttributePath': 'DisplayName', - 'AttributeValue': f'{groupName}' - }, - ] + hr_data = user_details + context_data = user + break + + if not email_exists: + return_error(f'User with the email {email_arg} was not found.') + + human_readable = tableToMarkdown('AWS IAM Identity Center Users ', hr_data, removeNull=True) + result = CommandResults( + outputs_prefix=PREFIX_USER, + readable_output=human_readable, + outputs_key_field='UserId', + outputs=context_data ) - rep = json.dumps(response) - repJSON = json.loads(rep) - datas = repJSON.get('Groups', []) - for da in datas: - for group in response['Groups']: - group_details = { - 'DisplayName': group['DisplayName'], - 'GroupId': group['GroupId'] - } - groupID = group['GroupId'] - data.append(group_details) - ec = {'AWS.IAM.IdentityCenter.Groups': data} - human_readable = 
tableToMarkdown('AWS IAM Identity Center Groups', data) - return_outputs(human_readable, ec) - return groupID - - -def list_groups_for_user(args, client): # pragma: no cover - data = [] - userName = demisto.getArg('userName') - userID = get_user(args, client) - response = client.list_group_memberships_for_member( - IdentityStoreId=f'{IDENTITYSTOREID}', - MemberId={ - 'UserId': f'{userID}' + return_results(result) + + +def list_users(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + List users from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing optional parameters such as 'limit' and 'nextToken'. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store from which users are listed. + """ + context_data = [] + hr_data = [] + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'MaxResults': get_limit(args), + 'NextToken': args.get('nextToken') + } + kwargs = remove_empty_elements(kwargs) + response = client.list_users(**kwargs) + for user in response.get('Users', []): + context_data.append(user) + user_details = { + 'UserId': user.get('UserId'), + 'UserName': user.get('UserName'), + 'DisplayName': user.get('DisplayName'), } + if user.get('Emails'): + emails = [email.get('Value') for email in user.get('Emails')] + user_details['Emails'] = emails + + hr_data.append(user_details) + + outputs = {f'{PREFIX_USER}(val.UserId === obj.UserId)': context_data, + f'{PREFIX}(true)': {'UserNextToken': response.get('NextToken')}} + human_readable = tableToMarkdown('AWS IAM Identity Center Users', hr_data, removeNull=True) + result = CommandResults( + readable_output=human_readable, + outputs_key_field='UserId', + outputs=outputs ) - rep = json.dumps(response) - repJSON = json.loads(rep) - datas = repJSON.get('GroupMemberships', []) - for da in datas: - for group in response['GroupMemberships']: - group_details = { - 'GroupId': group['GroupId'], - 
'MembershipId': group['MembershipId'] - } - membershipID = group['MembershipId'] - data.append(group_details) - ec = {'AWS.IAM.IdentityCenter.Users.GroupMemeberships': data} - human_readable = tableToMarkdown(f'AWS IAM Identity Center Group for user {userName} ', data) - return_outputs(human_readable, ec) - return membershipID + return_results(result) + + +def list_groups(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + List groups from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing optional parameters such as 'limit' and 'nextToken'. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store from which groups are listed. + """ + context_data = [] + hr_data = [] + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'MaxResults': get_limit(args), + 'NextToken': args.get('nextToken') + } + kwargs = remove_empty_elements(kwargs) + response = client.list_groups(**kwargs) + for group in response.get('Groups', []): + group_details = { + 'DisplayName': group.get('DisplayName'), + 'GroupId': group.get('GroupId'), + 'Description': group.get('Description') + } + hr_data.append(group_details) + context_data.append(group) + + outputs = {f'{PREFIX_GROUP}(val.GroupId === obj.GroupId)': context_data, + f'{PREFIX}(true)': {'GroupNextToken': response.get('NextToken')}} + human_readable = tableToMarkdown('AWS IAM Identity Center Groups', hr_data, removeNull=True) + result = CommandResults( + readable_output=human_readable, + outputs_key_field='GroupId', + outputs=outputs + ) + return_results(result) + + +def create_group(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Create a group in the Identity Store based on the provided arguments. + + Args: + args: A dictionary containing the group information such as 'displayName' and 'description'. + client: The client object used to interact with the Identity Store service. 
+ IdentityStoreId: The ID of the Identity Store where the group is to be created. + """ + display_name = args.get('displayName') + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'DisplayName': display_name, + 'Description': args.get('description') + } + kwargs = remove_empty_elements(kwargs) + response = client.create_group(**kwargs) + group_id = response.get('GroupId') + response.pop('ResponseMetadata', None) + human_readable = tableToMarkdown(f'Group {display_name} has been successfully created with id {group_id}', response) + result = CommandResults( + outputs_prefix=PREFIX_GROUP, + readable_output=human_readable, + outputs=response + ) + return_results(result) + + +def delete_group(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Delete a group from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the group to be deleted. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the group is registered. + """ + group_id = get_groupId_by_displayName(args, client, IdentityStoreId) + client.delete_group( + IdentityStoreId=IdentityStoreId, + GroupId=group_id + ) + + hr_data = f'The Group {group_id} has been removed.' + demisto.debug(hr_data) + result = CommandResults( + readable_output=hr_data + ) + return_results(result) + + +def update_group(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Update group information in the Identity Store based on the provided arguments. + + Args: + args: A dictionary containing the group information to be updated, such as 'displayName' and 'description'. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the group is registered. 
+ """ + display_name = args.get('displayName') + group_id = get_groupId_by_displayName(args, client, IdentityStoreId) + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'GroupId': group_id, + 'Operations': [{ + 'AttributePath': 'description', + 'AttributeValue': args.get('description') + }] + } + client.update_group(**kwargs) + hr_data = f'Group {display_name} has been successfully updated' + result = CommandResults( + readable_output=hr_data + ) + return_results(result) + + +def get_group(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Retrieve group information from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the group to be retrieved. + client: The client object used to interact with the Identity Store service. + IdentityStoreId: The ID of the Identity Store where the group is registered. + """ + response_id = get_groupId_by_displayName(args, client, IdentityStoreId) + response = client.describe_group( + IdentityStoreId=IdentityStoreId, + GroupId=response_id + ) + hr_data = { + 'DisplayName': response.get('DisplayName'), + 'GroupId': response.get('GroupId') + } + response.pop('ResponseMetadata', None) + human_readable = tableToMarkdown('AWS IAM Identity Center Groups', hr_data, removeNull=True) + result = CommandResults( + outputs_prefix=PREFIX_GROUP, + readable_output=human_readable, + outputs_key_field='GroupId', + outputs=response + ) + return_results(result) + + +def list_groups_for_user(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + List groups associated with a user from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the user + and optional parameters such as 'limit' and 'nextToken'. + client: The client object used to interact with the IAM Identity Center service. 
+ IdentityStoreId: The ID of the Identity Store from which groups are listed for the user. + """ + hr_data = [] + context_data: Dict[str, Any] = {} + user_id = get_userId_by_username(args, client, IdentityStoreId) + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'MemberId': { + 'UserId': user_id + }, + 'MaxResults': get_limit(args), + 'NextToken': args.get('nextToken') + } + kwargs = remove_empty_elements(kwargs) + response = client.list_group_memberships_for_member(**kwargs) + groups: list = [] + for group in response.get('GroupMemberships', []): + hr_data.append({ + 'UserID': user_id, + 'GroupID': group.get('GroupId'), + 'MembershipID': group.get('MembershipId') + }) + groups.append({ + 'GroupId': group.get('GroupId'), + 'MembershipId': group.get('MembershipId') + }) + + context_data['UserId'] = user_id + context_data['GroupsUserNextToken'] = response.get('NextToken') + last_context = demisto.context() + last_users = last_context.get('AWS', {}).get('IAMIdentityCenter', {}).get('User', {}) + + final_groups = update_groups_and_memberships(last_users, groups, 'UserId', user_id, 'GroupMemberships') + context_data['GroupMemberships'] = final_groups + + human_readable = tableToMarkdown('AWS IAM Identity Center Groups', hr_data, removeNull=True) + result = CommandResults( + outputs_prefix=PREFIX_USER, + readable_output=human_readable, + outputs_key_field='UserId', + outputs=context_data + ) + return_results(result) -def add_user_to_group(args, client): # pragma: no cover - userID = get_user(args, client) - GroupID = get_group(args, client) +def add_user_to_group(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Add a user to a group in the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the user and group. + client: The client object used to interact with the IAM Identity Center service. 
+ IdentityStoreId: The ID of the Identity Store where the user and group are registered. + """ + user_id = get_userId_by_username(args, client, IdentityStoreId) + Group_id = get_groupId_by_displayName(args, client, IdentityStoreId) response = client.create_group_membership( - IdentityStoreId=f'{IDENTITYSTOREID}', - GroupId=f'{GroupID}', + IdentityStoreId=IdentityStoreId, + GroupId=Group_id, MemberId={ - 'UserId': f'{userID}' + 'UserId': user_id } ) - if response['ResponseMetadata']['HTTPStatusCode'] == 200: - demisto.results("The user {0} was added to the IAM group: {1}".format(args.get('userName'), - args.get( - 'groupName'))) - - -def remove_user_from_groups(args, client): # pragma: no cover - membershipID = list_groups_for_user(args, client) - response = client.delete_group_membership( - IdentityStoreId=f'{IDENTITYSTOREID}', - MembershipId=f'{membershipID}' + membership_id = response.get('MembershipId') + hr_data = f'The membership id {membership_id} has been successfully created.' + result = CommandResults( + readable_output=hr_data + ) + return_results(result) + + +def delete_group_membership(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + Delete group memberships for a user in the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the user or membership(s) to be deleted. + client: The client object used to interact with the IAM Identity Center service. + IdentityStoreId: The ID of the Identity Store where the user and group memberships are registered. 
+ """ + memberships_to_delete = [] + membership_id = args.get('membershipId') + user_name = args.get('userName') + if membership_id and user_name: + return_error('Please provide only one of userName or membershipId.') + elif membership_id: + memberships_to_delete = argToList(membership_id) + elif user_name: + memberships_to_delete = get_group_memberships_for_member(args, client, IdentityStoreId) + if not memberships_to_delete: + return_error('User is not member of any group.') + else: + return_error('userName or membershipId must be provided.') + + for member in memberships_to_delete: + client.delete_group_membership( + IdentityStoreId=IdentityStoreId, + MembershipId=member + ) + + hr_data = f'The membership with ids {memberships_to_delete} have been deleted.' + demisto.debug(hr_data) + result = CommandResults( + readable_output=hr_data ) - if response['ResponseMetadata']['HTTPStatusCode'] == 200: - demisto.results( - "The User {0} has been removed from the group {1}".format(args.get('userName'), - args.get('groupName'))) + return_results(result) + + +def list_group_memberships(args: dict, client: Any, IdentityStoreId: str) -> None: + """ + List memberships of a group from the AWS IAM Identity Center based on the provided arguments. + + Args: + args: A dictionary containing information required to identify the group + and optional parameters such as 'limit' and 'nextToken'. + client: The client object used to interact with the IAM Identity Center service. + IdentityStoreId: The ID of the Identity Store where the group memberships are registered. 
+ """ + hr_data = [] + context_data: Dict[str, Any] = {} + group_id = get_groupId_by_displayName(args, client, IdentityStoreId) + kwargs = { + 'IdentityStoreId': IdentityStoreId, + 'GroupId': group_id, + 'MaxResults': get_limit(args), + 'NextToken': args.get('nextToken') + } + kwargs = remove_empty_elements(kwargs) + response = client.list_group_memberships(**kwargs) + memberships = [] + for membership in response.get('GroupMemberships', []): + member_details = { + 'MembershipId': membership.get('MembershipId'), + 'GroupId': group_id, + 'UserId': membership.get('MemberId', {}).get('UserId') + } + hr_data.append(member_details) + memberships.append({ + 'MembershipId': membership.get('MembershipId'), + 'UserId': membership.get('MemberId', {}).get('UserId') + }) + + context_data['GroupId'] = group_id + context_data['GroupMembershipNextToken'] = response.get('NextToken') + last_context = demisto.context() + last_groups = last_context.get('AWS', {}).get('IAMIdentityCenter', {}).get('Group', {}) + + final_memberships = update_groups_and_memberships(last_groups, memberships, 'GroupId', group_id, 'GroupMemberships') + + context_data['GroupMemberships'] = final_memberships + human_readable = tableToMarkdown('AWS IAM Identity Center Groups', hr_data, removeNull=True) + result = CommandResults( + outputs_prefix=PREFIX_GROUP, + readable_output=human_readable, + outputs_key_field='GroupId', + outputs=context_data + ) + return_results(result) -def test_function(args, client): - response = client.list_users( - IdentityStoreId=f'{IDENTITYSTOREID}', +def test_module(args: dict, client: Any, IdentityStoreId: str) -> None: # pragma: no cover + """ Command to test the connection to the API""" + if not IdentityStoreId: + return_error('Identity Store ID was not specified - Test failure. 
The `Identity Store ID` parameter can be left empty ' + 'and included as an argument in every command.') + + client.list_users( + IdentityStoreId=IdentityStoreId, ) - if response['ResponseMetadata']['HTTPStatusCode'] == 200: - demisto.results('ok') + return_results('ok') def main(): # pragma: no cover params = demisto.params() + args = demisto.args() + IdentityStoreId: str = args.get('IdentityStoreId', "") or params.get('IdentityStoreId', "") aws_default_region = params.get('defaultRegion') - aws_role_arn = params.get('roleArn') + aws_role_arn = params.get('roleArnCredentials', {}).get('password') or params.get('roleArn') aws_role_session_name = params.get('roleSessionName') aws_role_session_duration = params.get('sessionDuration') - aws_role_policy = None - aws_access_key_id = params.get('access_key') - aws_secret_access_key = params.get('secret_key') + aws_access_key_id = params.get('credentials', {}).get('identifier') or params.get('access_key') + aws_secret_access_key = params.get('credentials', {}).get('password') or params.get('secret_key') verify_certificate = not params.get('insecure', True) timeout = params.get('timeout') retries = params.get('retries') or 5 - - validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id, - aws_secret_access_key) - - aws_client = AWSClient(aws_default_region, aws_role_arn, aws_role_session_name, aws_role_session_duration, - aws_role_policy, aws_access_key_id, aws_secret_access_key, verify_certificate, timeout, - retries) command = demisto.command() - args = demisto.args() - client = aws_client.aws_session( - service=SERVICE, - role_arn=args.get('roleArn'), - role_session_name=args.get('roleSessionName'), - role_session_duration=args.get('roleSessionDuration'), - ) try: - demisto.debug('Command being called is {command}'.format(command=command)) + validate_params(aws_default_region, aws_role_arn, aws_role_session_name, aws_access_key_id, + aws_secret_access_key) + + aws_client = 
AWSClient(aws_default_region, aws_role_arn, aws_role_session_name, aws_role_session_duration, + None, aws_access_key_id, aws_secret_access_key, verify_certificate, timeout, + retries) + + client: AWSClient = aws_client.aws_session( + service=SERVICE, + region=args.get('region'), + role_arn=args.get('roleArn'), + role_session_name=args.get('roleSessionName'), + role_session_duration=args.get('roleSessionDuration'), + ) + + demisto.debug(f'Command being called is {command}') if command == 'test-module': - test_function(args, client) + test_module(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-create-user': - create_user(args, client) + create_user(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-get-user': - get_user(args, client) + get_user(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-get-user-by-email': - get_user_by_email(args, client) + get_user_by_email(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-list-users': - list_users(args, client) + list_users(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-list-groups': - list_groups(args, client) + list_groups(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-get-group': - get_group(args, client) + get_group(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-list-groups-for-user': - list_groups_for_user(args, client) + list_groups_for_user(args, client, IdentityStoreId) elif command == 'aws-iam-identitycenter-add-user-to-group': - add_user_to_group(args, client) - elif command == 'aws-iam-identitycenter-remove-user-from-all-groups': - remove_user_from_groups(args, client) - + add_user_to_group(args, client, IdentityStoreId) + elif command == 'aws-iam-identitycenter-delete-user': + delete_user(args, client, IdentityStoreId) + elif command == 'aws-iam-identitycenter-create-group': + create_group(args, client, IdentityStoreId) + elif command == 
'aws-iam-identitycenter-delete-group': + delete_group(args, client, IdentityStoreId) + elif command == 'aws-iam-identitycenter-delete-group-membership': + delete_group_membership(args, client, IdentityStoreId) + elif command == 'aws-iam-identitycenter-list-memberships': + list_group_memberships(args, client, IdentityStoreId) + elif command == 'aws-iam-identitycenter-update-user': + update_user(args, client, IdentityStoreId) + elif command == 'aws-iam-identitycenter-update-group': + update_group(args, client, IdentityStoreId) + else: + raise NotImplementedError(f'Command {command} is not implemented in AWS - IAM Identity Center integration.') + + # Log exceptions and return errors except Exception as e: - return_error('Error has occurred in the AWS IAM Integration: {code}\n {message}'.format( - code=type(e), message=str(e))) + demisto.info(str(e)) + return_error(f'Failed to execute {command} command.\nError:\n{str(e)}') if __name__ in ('__builtin__', 'builtins', '__main__'): diff --git a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.yml b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.yml index 82eaaa8de1e1..b6d089713b0f 100644 --- a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.yml +++ b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter.yml @@ -3,21 +3,30 @@ commonfields: version: -1 category: IT Services sectionOrder: -- Collect - Connect configuration: - display: Role Arn name: roleArn - required: true + required: false section: Connect type: 4 + hidden: true +- name: roleArnCredentials + required: false + type: 9 + displaypassword: Role Arn + hiddenusername: true - display: Role Session Name name: roleSessionName - required: true + required: false section: Connect type: 0 + additionalinfo: "A descriptive name for the assumed role session." 
- display: AWS Default Region + section: Connect name: defaultRegion + type: 15 + required: true options: - us-east-1 - us-east-2 @@ -37,53 +46,49 @@ configuration: - eu-west-3 - us-gov-east-1 - us-gov-west-1 - required: true - section: Collect - type: 15 - display: Role Session Duration - section: Connect name: sessionDuration type: 0 - required: false -- additionalinfo: The time in seconds till a timeout exception is reached. You can specify just the read timeout (for example 60) or also the connect timeout followed after a comma (for example 60,10). If a connect timeout is not specified, a default of 10 second will be used. - defaultvalue: 60,10 - display: Timeout - name: timeout - type: 0 section: Connect required: false -- additionalinfo: 'The maximum number of retry attempts when connection or throttling errors are encountered. Set to 0 to disable retries. The default value is 5 and the limit is 10. Note: Increasing the number of retries will increase the execution time.' - defaultvalue: "5" - display: Retries - name: retries +- additionalinfo: "The Identity Store ID parameter is required for API requests. It can be provided as a parameter or as an argument. If the Identity Store ID was not specified - Test failure." 
+ display: Identity Store ID + name: IdentityStoreId type: 0 section: Connect required: false -- display: Trust any certificate (not secure) - name: insecure - required: false - section: Connect - type: 8 -- display: Use system proxy settings - name: proxy +- display: Access Key + name: access_key required: false section: Connect - type: 8 -- display: Identity Store ID - name: IdentityStoreId - required: true - section: Collect type: 4 + hidden: true - display: Secret Key name: secret_key - required: true - section: Collect + required: false + section: Connect type: 4 + hidden: true - display: Access Key - name: access_key - required: true - section: Collect - type: 4 + name: credentials + required: false + section: Connect + type: 9 + displaypassword: Secret Key +- display: Timeout + name: timeout + required: false + section: Connect + type: 0 + additionalinfo: "The time in seconds until a timeout exception is reached. You can specify just the read timeout (for example 60) or also the connect timeout followed after a comma (for example 60,10). If a connect timeout is not specified, a default of 10 seconds will be used." + defaultvalue: 60,10 +- display: Retries + name: retries + required: false + section: Connect + type: 0 + additionalinfo: "The maximum number of retry attempts when connection or throttling errors are encountered. Set to 0 to disable retries. The default value is 5 and the limit is 10. Note: Increasing the number of retries will increase the execution time." + defaultvalue: 5 - display: AWS STS Regional Endpoints additionalinfo: Sets the AWS_STS_REGIONAL_ENDPOINTS environment variable to specify the AWS STS endpoint resolution logic. By default, this option is set to “legacy” in AWS. Leave empty if the environment variable is already set using server configuration. 
name: sts_regional_endpoint @@ -93,6 +98,17 @@ configuration: type: 15 section: Connect required: false +- display: Trust any certificate (not secure) + name: insecure + required: false + section: Connect + type: 8 +- display: Use system proxy settings + name: proxy + type: 8 + section: Connect + advanced: true + required: false description: Amazon Web Services IAM Identity Center. display: AWS - IAM Identity Center name: AWS - IAM Identity Center @@ -104,73 +120,514 @@ script: required: true - description: The family name of the user to create. name: familyName - - description: The First name of the user to create. + required: true + - description: The first name of the user to create. name: givenName required: true - description: The email address of the user to create. name: userEmailAddress - required: true + required: false - description: The display name of the user to create. name: displayName required: true + - description: The profile URL of the user to create. + name: profileUrl + required: false + - description: Is this the primary email address for the associated resource? + name: userEmailAddressPrimary + required: false + auto: PREDEFINED + predefined: + - yes + - no + - description: The type of the user to create. + name: userType + required: false + - description: The title of the user to create. + name: title + required: false + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. 
+ name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false description: Creates a new IAM Identity Center user for your AWS account. name: aws-iam-identitycenter-create-user outputs: - - contextPath: AWS.IAMIdentityCenter.Users.UserId - description: The date and time, when the user was created. - type: date + - contextPath: AWS.IAMIdentityCenter.User.UserId + description: The user ID. + type: string + - contextPath: AWS.IAMIdentityCenter.User.IdentityStoreId + description: Identity Store ID. + type: string - arguments: + - description: The AWS Region. If not specified, the default region will be used. + name: region + required: false + auto: PREDEFINED + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false - description: The name of the user to get information about. name: userName required: true - description: Retrieves information about the specified IAM user, including the user creation date, path, unique ID, and ARN. + description: Retrieves information about the specified IAM user. name: aws-iam-identitycenter-get-user outputs: - - contextPath: AWS.IAM.IdentityCenter.Users.UserName + - contextPath: AWS.IAMIdentityCenter.User.UserName description: The friendly name identifying the user. type: string - - contextPath: AWS.IAM.IdentityCenter.Users.UserId + - contextPath: AWS.IAMIdentityCenter.User.UserId description: The stable and unique string identifying the user. 
type: string - - contextPath: AWS.IAM.IdentityCenter.Users.Email - description: The user email address. - type: string - - contextPath: AWS.IAM.IdentityCenter.Users.DisplayName - description: The user display name in AWS IAM IdentityCenter. - - arguments: [] + - contextPath: AWS.IAMIdentityCenter.User.ExternalIds.Issuer + description: The issuer for an external identifier. + type: String + - contextPath: AWS.IAMIdentityCenter.User.ExternalIds.Id + description: The identifier issued to this resource by an external identity provider. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.Formatted + description: Formatted version of the user's name for display. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.FamilyName + description: The family name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.GivenName + description: The given name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.MiddleName + description: The middle name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.HonorificPrefix + description: The honorific prefix of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.HonorificSuffix + description: The honorific suffix of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.DisplayName + description: The name of the user formatted for display when referenced. + type: String + - contextPath: AWS.IAMIdentityCenter.User.NickName + description: An alternate name for the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.ProfileUrl + description: URL associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Value + description: Email address associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Type + description: Type of email address. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Primary + description: Indicates whether this is the primary email address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.StreetAddress + description: Street address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Locality + description: Address locality. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Region + description: Region of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.PostalCode + description: Postal code of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Country + description: Country of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Formatted + description: Formatted version of the address for display. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Type + description: Type of address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Primary + description: Indicates whether this is the primary address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Value + description: Phone number associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Type + description: Type of phone number. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Primary + description: Indicates whether this is the primary phone number. + type: String + - contextPath: AWS.IAMIdentityCenter.User.UserType + description: Type of user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Title + description: Title of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PreferredLanguage + description: Preferred language of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Locale + description: Geographical region or location of the user. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.User.Timezone + description: Time zone of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.IdentityStoreId + description: Globally unique identifier for the identity store. + type: String + - arguments: + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: Number of results to return. Maximum value is 50. + defaultValue: '50' + name: limit + required: false + - description: The pagination token. + name: nextToken + required: false description: Lists the IAM users, returns all users in the AWS account. name: aws-iam-identitycenter-list-users outputs: - - contextPath: AWS.IAMIdentityCenter.Users.UserName + - contextPath: AWS.IAMIdentityCenter.User.UserName description: The friendly name identifying the user. type: string - - contextPath: AWS.IAMIdentityCenter.Users.UserId + - contextPath: AWS.IAMIdentityCenter.User.UserId description: The stable and unique string identifying the user. type: string - - arguments: [] + - contextPath: AWS.IAMIdentityCenter.User.ExternalIds.Issuer + description: The issuer for an external identifier. + type: String + - contextPath: AWS.IAMIdentityCenter.User.ExternalIds.Id + description: The identifier issued to this resource by an external identity provider. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.Formatted + description: Formatted version of the user's name for display. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.FamilyName + description: The family name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.GivenName + description: The given name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.MiddleName + description: The middle name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.HonorificPrefix + description: The honorific prefix of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.HonorificSuffix + description: The honorific suffix of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.DisplayName + description: The name of the user formatted for display when referenced. + type: String + - contextPath: AWS.IAMIdentityCenter.User.NickName + description: An alternate name for the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.ProfileUrl + description: URL associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Value + description: Email address associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Type + description: Type of email address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Primary + description: Indicates whether this is the primary email address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.StreetAddress + description: Street address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Locality + description: Address locality. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Region + description: Region of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.PostalCode + description: Postal code of the address. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Country + description: Country of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Formatted + description: Formatted version of the address for display. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Type + description: Type of address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Primary + description: Indicates whether this is the primary address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Value + description: Phone number associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Type + description: Type of phone number. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Primary + description: Indicates whether this is the primary phone number. + type: String + - contextPath: AWS.IAMIdentityCenter.User.UserType + description: Type of user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Title + description: Title of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PreferredLanguage + description: Preferred language of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Locale + description: Geographical region or location of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Timezone + description: Time zone of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.IdentityStoreId + description: Globally unique identifier for the identity store. + type: String + - contextPath: AWS.IAMIdentityCenter.UserNextToken + description: Pagination token. + type: String + - arguments: + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. 
+ name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: Number of results to return. Maximum value is 50. + defaultValue: '50' + name: limit + required: false + - description: The pagination token. + name: nextToken + required: false description: Lists all the IAM groups in the AWS account. name: aws-iam-identitycenter-list-groups outputs: - - contextPath: AWS.IAM.IdentityCenter.Groups.GroupName - description: The friendly name that identifies the group. - type: string - - contextPath: AWS.IAM.IdentityCenter.Groups.GroupId - description: The stable and unique string identifying the group. - type: string + - contextPath: AWS.IAMIdentityCenter.Group.GroupId + description: The identifier for a group in the identity store. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.DisplayName + description: The display name value for the group. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.ExternalIds.Issuer + description: The issuer for an external identifier. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.ExternalIds.Id + description: The identifier issued to this resource by an external identity provider. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.Description + description: A description of the specified group. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.IdentityStoreId + description: The globally unique identifier for the identity store. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.GroupNextToken + description: The pagination token used for the ListUsers and ListGroups API operations. + type: String - arguments: + - description: The AWS Region. If not specified, the default region will be used. + name: region + required: false + auto: PREDEFINED + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: Number of results to return. Maximum value is 50. + defaultValue: '50' + name: limit + required: false + - description: The pagination token. + name: nextToken + required: false - description: The name of the user to list groups for. name: userName required: true - description: Lists the IAM groups that the specified IAM user belongs to. + description: Lists the IAM Identity Center groups that the specified IAM user belongs to. name: aws-iam-identitycenter-list-groups-for-user outputs: - - contextPath: AWS.IAM.IdentityCenter.Users.GroupMemeberships.GroupName + - contextPath: AWS.IAMIdentityCenter.User.UserId + description: User ID. + type: string + - contextPath: AWS.IAMIdentityCenter.User.GroupMemeberships.MembershipId description: The friendly name that identifies the group. type: string - - contextPath: AWS.IAM.IdentityCenter.Users.GroupMemeberships.GroupId + - contextPath: AWS.IAMIdentityCenter.User.GroupMemeberships.GroupId description: The stable and unique string identifying the group. type: string - arguments: + - description: The AWS Region. 
If not specified, the default region will be used. + name: region + required: false + auto: PREDEFINED + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false - description: The name of the user to add. name: userName required: true @@ -180,40 +637,547 @@ script: description: Adds the specified user to the specified group. name: aws-iam-identitycenter-add-user-to-group - arguments: + - description: The AWS Region. If not specified, the default region will be used. + name: region + required: false + auto: PREDEFINED + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false - description: The name of the group to search. - name: groupName + name: displayName required: true description: Get AWS IAM Identity Center group Information. name: aws-iam-identitycenter-get-group + outputs: + - contextPath: AWS.IAMIdentityCenter.Group.GroupId + description: The identifier for a group in the identity store. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.Group.DisplayName + description: The display name of the group. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.ExternalIds.Issuer + description: The issuer for an external identifier. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.ExternalIds.Id + description: The identifier issued to this resource by an external identity provider. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.Description + description: A description of the group. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.IdentityStoreId + description: The globally unique identifier for the identity store. + type: String - arguments: - - description: Username that will be removed from all groups. - name: userName - required: true - description: This will remove the entered user from all groups/memberships. - name: aws-iam-identitycenter-remove-user-from-all-groups - - arguments: - - description: The email of the user to be removed. + - description: The AWS Region. If not specified, the default region will be used. + name: region + required: false + auto: PREDEFINED + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: The email of the user. name: emailAddress + required: true + description: Retrieves information about the specified IAM user. 
+ name: aws-iam-identitycenter-get-user-by-email outputs: - - contextPath: AWS.IAM.IdentityCenter.Users.UserName + - contextPath: AWS.IAMIdentityCenter.User.UserName description: The friendly name identifying the user. type: string - - contextPath: AWS.IAM.IdentityCenter.Users.Email - description: The email address identifying the user. + - contextPath: AWS.IAMIdentityCenter.User.UserId + description: The stable and unique string identifying the user. type: string - - contextPath: AWS.IAM.IdentityCenter.Users.UserId - description: The user ID of the queried user. + - contextPath: AWS.IAMIdentityCenter.User.ExternalIds.Issuer + description: The issuer for an external identifier. + type: String + - contextPath: AWS.IAMIdentityCenter.User.ExternalIds.Id + description: The identifier issued to this resource by an external identity provider. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.Formatted + description: Formatted version of the user's name for display. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.FamilyName + description: The family name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.GivenName + description: The given name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.MiddleName + description: The middle name of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.HonorificPrefix + description: The honorific prefix of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Name.HonorificSuffix + description: The honorific suffix of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.DisplayName + description: The name of the user formatted for display when referenced. + type: String + - contextPath: AWS.IAMIdentityCenter.User.NickName + description: An alternate name for the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.ProfileUrl + description: URL associated with the user. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Value + description: Email address associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Type + description: Type of email address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Emails.Primary + description: Indicates whether this is the primary email address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.StreetAddress + description: Street address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Locality + description: Address locality. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Region + description: Region of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.PostalCode + description: Postal code of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Country + description: Country of the address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Formatted + description: Formatted version of the address for display. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Type + description: Type of address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Addresses.Primary + description: Indicates whether this is the primary address. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Value + description: Phone number associated with the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Type + description: Type of phone number. + type: String + - contextPath: AWS.IAMIdentityCenter.User.PhoneNumbers.Primary + description: Indicates whether this is the primary phone number. + type: String + - contextPath: AWS.IAMIdentityCenter.User.UserType + description: Type of user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Title + description: Title of the user. 
+ type: String + - contextPath: AWS.IAMIdentityCenter.User.PreferredLanguage + description: Preferred language of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Locale + description: Geographical region or location of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.Timezone + description: Time zone of the user. + type: String + - contextPath: AWS.IAMIdentityCenter.User.IdentityStoreId + description: Globally unique identifier for the identity store. + type: String + - arguments: + - description: The AWS Region. If not specified, the default region will be used. + name: region + auto: PREDEFINED + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: The name of the user to remove. + name: userName + required: true + description: Removes the specified user from the AWS IAM Identity Center. + name: aws-iam-identitycenter-delete-user + - arguments: + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. 
+ name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: The name of the group to create. + name: displayName + required: true + - description: The description of the group to create. + name: description + required: false + description: Creates a new IAM Identity Center group for your AWS account. + name: aws-iam-identitycenter-create-group + outputs: + - contextPath: AWS.IAMIdentityCenter.Group.GroupId + description: The user ID. type: string - - contextPath: AWS.IAM.IdentityCenter.Users.DisplayName - description: The display name of the queried user. + - contextPath: AWS.IAMIdentityCenter.Group.IdentityStoreId + description: Identity store ID. type: string - description: This will get user information using email address. - name: aws-iam-identitycenter-get-user-by-email - dockerimage: demisto/boto3py3:1.0.0.89670 + - arguments: + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: The name of the group to remove. + name: groupName + required: true + description: Removes the specified group from the IAM Identity Center. + name: aws-iam-identitycenter-delete-group + - arguments: + - auto: PREDEFINED + description: The AWS Region. 
If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: The name of the user to delete from all groups. + name: userName + required: false + - description: Comma-separated list of membership IDs to delete. + name: membershipId + required: false + description: Deletes a user from all groups if a username is provided, or deletes multiple memberships if a list of memberships is provided. + name: aws-iam-identitycenter-delete-group-membership + - arguments: + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: Number of results to return. Maximum value is 50. + defaultValue: '50' + name: limit + required: false + - description: The pagination token. 
+ name: nextToken + required: false + - description: The name of the group to list the memberships. + name: groupName + required: true + description: Lists the memberships of the group. + name: aws-iam-identitycenter-list-memberships + outputs: + - contextPath: AWS.IAMIdentityCenter.GroupMemberships.IdentityStoreId + description: The globally unique identifier for the identity store. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.GroupMemberships.MembershipId + description: The identifier for a GroupMembership object in an identity store. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.GroupId + description: The identifier for a group in the identity store. + type: String + - contextPath: AWS.IAMIdentityCenter.Group.GroupMemberships.UserId + description: Identifier of resources that can be members. + type: String + - contextPath: AWS.IAMIdentityCenter.GroupMembershipNextToken + description: The pagination token. + type: String + - arguments: + - description: The username of the user to update. + name: userName + required: true + - description: The family name of the user to update. + name: familyName + required: false + - description: The first name of the user to update. + name: givenName + required: false + - description: Is this the primary email address for the associated resource. + name: userEmailAddressPrimary + required: false + auto: PREDEFINED + predefined: + - yes + - no + - description: The email address of the user to update. + name: userEmailAddress + required: false + - description: The display name of the user to update. + name: displayName + required: false + - description: The profile URL of the user to update. + name: profileUrl + required: false + - description: The type of the user to update. + name: userType + required: false + - description: The title of the user to update. + name: title + required: false + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. 
+ name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + description: Updates an IAM Identity Center user for your AWS account. + name: aws-iam-identitycenter-update-user + - arguments: + - auto: PREDEFINED + description: The AWS Region. If not specified, the default region will be used. + name: region + predefined: + - us-east-1 + - us-east-2 + - us-west-1 + - us-west-2 + - ca-central-1 + - eu-west-1 + - eu-central-1 + - eu-west-2 + - ap-northeast-1 + - ap-northeast-2 + - ap-southeast-1 + - ap-southeast-2 + - ap-south-1 + - sa-east-1 + - eu-north-1 + - eu-west-3 + required: false + - description: The Amazon Resource Name. + name: roleArn + required: false + - description: Role session duration. + name: roleSessionDuration + required: false + - description: Role session name. + name: roleSessionName + required: false + - description: Identity store ID. + name: IdentityStoreId + required: false + - description: The name of the group to update. + name: displayName + required: true + - description: The description of the group to update. + name: description + required: true + description: Updates an IAM Identity Center group for your AWS account. 
+ name: aws-iam-identitycenter-update-group + dockerimage: demisto/boto3py3:1.0.0.94100 runonce: false script: '' subtype: python3 type: python fromversion: 5.0.0 tests: -- No tests (auto formatted) +- AWS-IAMIdentityCenter-Test diff --git a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_description.md b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_description.md index b628e7b0a70e..03dbac181208 100644 --- a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_description.md +++ b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_description.md @@ -1,11 +1,9 @@ -AWS IAM Identity Center +Use AWS IAM Identity Center to manage access to your AWS resources securely. With IAM Identity Center, you can define and enforce permissions for users and resources across your AWS account. -Author: Sameh El-Hakim - -With AWS IAM Identity Center (successor to AWS Single Sign-On), you can manage sign-in security for your workforce identities, also known as workforce users. IAM Identity Center provides one place where you can create or connect workforce users and manage their access centrally across all their AWS accounts and applications. IAM Identity Center is the recommended approach for workforce authentication and authorization in AWS, for organizations of any size and type. +IAM Identity Center uses the sso and identitystore API namespaces. 
--- -[View Integration Documentation](https://xsoar.pan.dev/docs/reference/integrations/aws-iam-identitycenter) +[View Integration Authentication](https://xsoar.pan.dev/docs/reference/articles/aws-integrations---authentication) diff --git a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_test.py b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_test.py new file mode 100644 index 000000000000..ebec4d372141 --- /dev/null +++ b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/AWSIAMIdentityCenter_test.py @@ -0,0 +1,862 @@ + +import importlib +import demistomock as demisto +import pytest + +AWSIAMIdentityCenter = importlib.import_module("AWSIAMIdentityCenter") + +RESPONSE_GROUP_ID = 'GROUP_ID' + +RESPONSE_USER_ID = 'USER_ID' + +RESPONSE_DELETE = {'ResponseMetadata': {'HTTPStatusCode': 200}} + +IDENTITY_STORE_ID = '123456' + + +class Boto3Client: + def create_user(self): + pass + + def create_group(self): + pass + + def list_users(self): + pass + + def list_groups(self): + pass + + def describe_user(self): + pass + + def describe_group(self): + pass + + def create_group_membership(self): + pass + + def list_group_memberships_for_member(self): + pass + + def delete_group_membership(self): + pass + + def delete_user(self): + pass + + def delete_group(self): + pass + + def list_group_memberships(self): + pass + + def update_user(self): + pass + + def update_group(self): + pass + + def get_user_operations_list(self): + pass + + +def test_create_user(mocker): + """ + Given: + Arguments for creating a user + + When: + Creating a user using the create-user command + + Then: + Verify that the user is created with the correct arguments + """ + + args = { + 'userName': 'test_user', + 'familyName': 'Doe', + 'givenName': 'John', + 'userEmailAddress': 'john.doe@example.com', + 'displayName': 'John Doe', + 'userEmailAddressPrimary': True + } + res = { + 'UserId': 'USER_ID', + 'IdentityStoreId': 
IDENTITY_STORE_ID, + 'ResponseMetadata': {'HTTPStatusCode': 200} + } + + from AWSIAMIdentityCenter import create_user + mocker.patch.object(Boto3Client, "create_user", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + create_user(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + assert {'UserId': 'USER_ID', 'IdentityStoreId': IDENTITY_STORE_ID} in contents.get( + 'EntryContext').values() + assert 'User test_user has been successfully created with user id USER_ID' in contents.get('HumanReadable') + + +def test_update_user(mocker): + """ + Given: + Arguments for updating a user + + When: + updating a user details using the update-user command + + Then: + Verify that the user is updated + """ + + response_id = {'UserId': 'USER_ID'} + + args = { + 'userName': 'test_user', + 'familyName': 'changed_fam', + } + + from AWSIAMIdentityCenter import update_user + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=response_id) + mocker.patch.object(Boto3Client, "update_user", return_value={}) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + update_user(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + assert 'User test_user has been successfully updated' in contents.get('HumanReadable') + + +def test_update_group(mocker): + """ + Given: + Arguments for updating a group + + When: + updating a group description using the update-group command + + Then: + Verify that the group is updated + """ + + response_id = {'GroupId': 'GROUP_ID'} + + args = { + 'displayName': 'test_group', + 'description': 'changed_description', + } + + from AWSIAMIdentityCenter import update_group + mocker.patch.object(AWSIAMIdentityCenter, "get_groupId_by_displayName", return_value=response_id) + mocker.patch.object(Boto3Client, "update_group", return_value={}) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + update_group(args, client, 
IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + assert 'Group test_group has been successfully updated' in contents.get('HumanReadable') + + +def test_create_group(mocker): + """ + Given: + Arguments for creating a group + + When: + Creating a group using the create-group command + + Then: + Verify that the group is created with the correct arguments + """ + args = { + 'displayName': 'Test Group', + 'description': 'Test Description' + } + res = { + 'GroupId': IDENTITY_STORE_ID, + 'ResponseMetadata': {'HTTPStatusCode': 200} + } + + from AWSIAMIdentityCenter import create_group + mocker.patch.object(Boto3Client, "create_group", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + create_group(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + assert {'GroupId': IDENTITY_STORE_ID} in contents.get( + 'EntryContext').values() + assert 'Group Test Group has been successfully created with id 123456' in contents.get('HumanReadable') + + +def test_list_users(mocker): + """ + Given: + Arguments for listing users + + When: + Listing users using the list-users command + + Then: + Verify that the correct users are listed with the correct details + """ + args = { + 'limit': 1, + 'nextToken': 'TOKEN' + } + + res = { + 'IdentityStoreId': IDENTITY_STORE_ID, + 'Users': [ + { + 'UserId': 'USER_ID', + 'UserName': 'test_user', + 'DisplayName': 'Test User', + 'Emails': [{'Value': 'test@example.com'}] + } + ], + 'NextToken': 'NEXT_TOKEN' + } + + from AWSIAMIdentityCenter import list_users + mocker.patch.object(Boto3Client, "list_users", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + list_users(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + assert 'AWS IAM Identity Center Users' in contents.get('HumanReadable') + assert [{'UserId': 'USER_ID', 'UserName': 'test_user', 'DisplayName': 'Test User', 'Emails': [{ + 'Value': 
'test@example.com'}]}] in contents.get( + 'EntryContext').values() + assert {'UserNextToken': 'NEXT_TOKEN'} in contents.get( + 'EntryContext').values() + + +def test_list_groups(mocker): + """ + Given: + Arguments for listing groups + + When: + Listing groups using the list-groups command + + Then: + Verify that the correct groups are listed with the correct details + """ + args = {} + + res = { + 'Groups': [ + { + 'GroupId': '123', + 'DisplayName': 'Test Group', + 'Description': 'Test Description' + } + ], + 'NextToken': None + } + + from AWSIAMIdentityCenter import list_groups + mocker.patch.object(Boto3Client, "list_groups", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + list_groups(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + assert 'AWS IAM Identity Center Groups' in contents.get('HumanReadable') + assert [{'GroupId': '123', 'DisplayName': 'Test Group', 'Description': 'Test Description'}] in contents.get( + 'EntryContext').values() + assert {'GroupNextToken': None} in contents.get( + 'EntryContext').values() + + +def test_get_user(mocker): + """ + Given: + User Name + + When: + Getting a user using the get-user command + + Then: + Verify that the correct user is retrieved with the correct details + """ + args = { + 'userName': 'test_user' + } + + response_id = {'UserId': 'USER_ID'} + + res = { + 'UserId': 'USER_ID', + 'UserName': 'test_user', + 'DisplayName': 'Test User', + 'Emails': [{'Value': 'test@example.com'}], + 'ResponseMetadata': {'HTTPStatusCode': 200} + } + + from AWSIAMIdentityCenter import get_user + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=response_id) + mocker.patch.object(Boto3Client, "describe_user", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + get_user(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert {'UserId': 'USER_ID', 'UserName': 'test_user', 
'DisplayName': 'Test User', 'Emails': [{ + 'Value': 'test@example.com'}]} in contents.get( + 'EntryContext').values() + assert 'AWS IAM Identity Center Users' in contents.get('HumanReadable') + + +def test_get_user_by_email(mocker): + """ + Given: + Email address + + When: + Getting a user using the get-user-by-email command + + Then: + Verify that the correct user is retrieved with the correct details + """ + + args = { + 'emailAddress': 'test@example.com' + } + + res = { + 'Users': [ + { + 'UserId': 'USER_ID', + 'UserName': 'test_user', + 'DisplayName': 'Test User', + 'Name': { + 'FamilyName': 'User', + 'GivenName': 'Test', + }, + 'Emails': [ + {'Value': 'test@example.com', + 'Type': 'work', + 'Primary': True} + ], + } + ], + 'ResponseMetadata': {'HTTPStatusCode': 200} + } + + from AWSIAMIdentityCenter import get_user_by_email + mocker.patch.object(Boto3Client, "list_users", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + get_user_by_email(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert {'UserId': 'USER_ID', 'UserName': 'test_user', 'DisplayName': 'Test User', + 'Name': {'FamilyName': 'User', 'GivenName': 'Test'}, + 'Emails': [{'Value': 'test@example.com', 'Type': 'work', 'Primary': True}]} in contents.get( + 'EntryContext').values() + assert 'AWS IAM Identity Center Users' in contents.get('HumanReadable') + + +def test_get_user_by_email_not_exist(mocker): + """ + Given: + Not existing email address + + When: + Asking for a user details using the get-user-by-email command + + Then: + Return an error + """ + + args = { + 'emailAddress': 'notexist@example.com' + } + + # Mock the response to indicate that no user exists + res = {'Users': [ + { + 'UserId': 'USER_ID', + 'UserName': 'test_user', + 'DisplayName': 'Test User', + 'Name': { + 'FamilyName': 'User', + 'GivenName': 'Test', + }, + 'Emails': [ + {'Value': 'test@example.com', + 'Type': 'work', + 'Primary': True} + ], + } + ], + 
'ResponseMetadata': {'HTTPStatusCode': 200}} + + mocker.patch.object(Boto3Client, "list_users", return_value=res) + mocker.patch.object(demisto, 'results') + return_error_mock = mocker.patch.object(AWSIAMIdentityCenter, 'return_error') + + from AWSIAMIdentityCenter import get_user_by_email + client = Boto3Client() + get_user_by_email(args, client, IDENTITY_STORE_ID) + + assert return_error_mock.call_count == 1 + assert 'User with the email notexist@example.com was not found.' in return_error_mock.call_args.args + + +def test_get_group(mocker): + """ + Given: + Arguments for getting a group + + When: + Getting a group using the get-group command + + Then: + Verify that the correct group is retrieved with the correct details + """ + args = { + 'displayName': 'test_group' + } + + response_id = {'GroupId': IDENTITY_STORE_ID} + + res = { + 'GroupId': 'string', + 'DisplayName': 'test_group', + 'Description': None, + 'ResponseMetadata': {'HTTPStatusCode': 200} + } + + from AWSIAMIdentityCenter import get_group + mocker.patch.object(AWSIAMIdentityCenter, "get_groupId_by_displayName", return_value=response_id) + mocker.patch.object(Boto3Client, "describe_group", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + get_group(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert {'GroupId': 'string', 'DisplayName': 'test_group', 'Description': None} in contents.get( + 'EntryContext').values() + assert 'AWS IAM Identity Center Groups' in contents.get('HumanReadable') + + +def test_add_user_to_group(mocker): + """ + Given: + Arguments for adding a user to a group + + When: + Adding a user to a group using the add-user-to-group command + + Then: + Verify that the user is added to the group + """ + args = { + 'userName': 'test_user', + 'displayName': 'test_group' + } + + res = { + 'MembershipId': '10203040', + 'ResponseMetadata': {'HTTPStatusCode': 200} + } + + from AWSIAMIdentityCenter import 
add_user_to_group + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=RESPONSE_USER_ID) + mocker.patch.object(AWSIAMIdentityCenter, "get_groupId_by_displayName", return_value=RESPONSE_GROUP_ID) + mocker.patch.object(Boto3Client, "create_group_membership", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + add_user_to_group(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert 'The membership id 10203040 has been successfully created.' in contents.get('HumanReadable') + + +def test_list_groups_for_user(mocker): + """ + Given: + Arguments for listing groups for a user + + When: + Listing groups for a user using the list-groups-for-user command + + Then: + Verify that the correct groups are listed for the user with the correct details + """ + args = { + 'userName': 'test_user' + } + + res = { + 'GroupMemberships': [ + { + 'MemberId': {'UserId': 'USER_ID'}, + 'GroupId': 'GROUP_ID', + 'MembershipId': 'MEMBERSHIP_ID' + } + ], + 'NextToken': None + } + + from AWSIAMIdentityCenter import list_groups_for_user + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=RESPONSE_USER_ID) + mocker.patch.object(Boto3Client, "list_group_memberships_for_member", return_value=res) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + list_groups_for_user(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert {'GroupMemberships': [{'GroupId': 'GROUP_ID', 'MembershipId': 'MEMBERSHIP_ID'}], 'UserId': 'USER_ID', + 'GroupsUserNextToken': None} in contents.get('EntryContext').values() + assert 'AWS IAM Identity Center Groups' in contents.get('HumanReadable') + + +def test_delete_group_membership(mocker): + """ + Given: + Username for deleting a group membership + + When: + Deleting a group membership using the delete-group-membership command + + Then: + Verify that the correct group membership is deleted + """ + 
args = { + 'userName': 'test_user' + } + + res = { + 'GroupMemberships': [ + { + 'MemberId': {'UserId': 'USER_ID'}, + 'GroupId': 'GROUP_ID', + 'MembershipId': 'MEMBERSHIP_ID' + }, + { + 'MemberId': {'UserId': 'USER_ID'}, + 'GroupId': 'GROUP_ID2', + 'MembershipId': 'MEMBERSHIP_ID123' + } + ], + 'NextToken': None + } + + from AWSIAMIdentityCenter import delete_group_membership + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=RESPONSE_USER_ID) + mocker.patch.object(Boto3Client, "list_group_memberships_for_member", return_value=res) + mocker.patch.object(Boto3Client, "delete_group_membership", return_value=RESPONSE_DELETE) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + delete_group_membership(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert "The membership with ids ['MEMBERSHIP_ID', 'MEMBERSHIP_ID123'] have been deleted." in contents.get('HumanReadable') + + +def test_delete_group_memberships_by_membershipId(mocker): + """ + Given: + List of group memberships + + When: + Deleting a group membership using the delete-group-membership command + + Then: + Verify that the correct group membership is deleted + """ + args = { + 'membershipId': 'MEMBERSHIP_ID, MEMBERSHIP_ID123' + } + + from AWSIAMIdentityCenter import delete_group_membership + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=RESPONSE_USER_ID) + mocker.patch.object(Boto3Client, "delete_group_membership", return_value=RESPONSE_DELETE) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + delete_group_membership(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert "The membership with ids ['MEMBERSHIP_ID', 'MEMBERSHIP_ID123'] have been deleted." 
in contents.get('HumanReadable') + + +def test_delete_user(mocker): + """ + Given: + Arguments for deleting a user + + When: + Deleting a user using the delete-user command + + Then: + Verify that the correct user is deleted + """ + args = { + 'userName': 'test_user' + } + + from AWSIAMIdentityCenter import delete_user + mocker.patch.object(AWSIAMIdentityCenter, "get_userId_by_username", return_value=RESPONSE_USER_ID) + mocker.patch.object(Boto3Client, "delete_user", return_value=RESPONSE_DELETE) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + delete_user(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert 'The User USER_ID has been removed.' in contents.get('HumanReadable') + + +def test_delete_group(mocker): + """ + Given: + Arguments for deleting a group + + When: + Deleting a group using the delete-group command + + Then: + Verify that the correct group is deleted + """ + args = { + 'displayName': 'test_group' + } + + from AWSIAMIdentityCenter import delete_group + mocker.patch.object(AWSIAMIdentityCenter, "get_groupId_by_displayName", return_value=RESPONSE_GROUP_ID) + mocker.patch.object(Boto3Client, "delete_group", return_value=RESPONSE_DELETE) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + delete_group(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert 'The Group GROUP_ID has been removed.' 
in contents.get('HumanReadable') + + +def test_list_group_memberships(mocker): + """ + Given: + Arguments for listing group memberships + + When: + Listing group memberships using the list-group-memberships command + + Then: + Verify that the correct group memberships are listed with the correct details + """ + args = { + 'displayName': 'test_group', + } + + response = { + 'GroupMemberships': [ + { + 'MembershipId': 'MEMBERSHIP_ID', + 'MemberId': {'UserId': 'USER_ID'} + } + ], + 'NextToken': 'NEXT_TOKEN' + } + + from AWSIAMIdentityCenter import list_group_memberships + mocker.patch.object(AWSIAMIdentityCenter, "get_groupId_by_displayName", return_value=RESPONSE_GROUP_ID) + mocker.patch.object(Boto3Client, "list_group_memberships", return_value=response) + mocker.patch.object(demisto, 'results') + + client = Boto3Client() + list_group_memberships(args, client, IDENTITY_STORE_ID) + contents = demisto.results.call_args[0][0] + + assert {'GroupId': 'GROUP_ID', 'GroupMemberships': [{'MembershipId': 'MEMBERSHIP_ID', 'UserId': 'USER_ID'}], + 'GroupMembershipNextToken': 'NEXT_TOKEN'} in contents.get('EntryContext').values() + assert 'AWS IAM Identity Center Groups' in contents.get('HumanReadable') + + +def test_get_user_operations_list_empty_region(): + """ + Given: + Arguments not including 'region' argument in the input dictionary. + + When: + Generating a list of operations to update user information using get_user_operations_list function. + + Then: + Ensure that the function handles the empty 'region' argument properly and generates the expected list of operations. 
+ """ + # Input arguments with empty 'region' + args = { + 'userEmailAddressPrimary': 'true', + 'userEmailAddress': 'test@example.com', + 'familyName': 'Doe', + 'givenName': 'John', + 'displayName': 'John Doe', + 'userType': 'Employee', + 'profileUrl': 'https://example.com/profile', + 'title': 'Software Engineer' + } + + # Expected list of operations + expected_operations = [ + {'AttributePath': 'name.familyName', 'AttributeValue': 'Doe'}, + {'AttributePath': 'name.givenName', 'AttributeValue': 'John'}, + {'AttributePath': 'emails', 'AttributeValue': [{'value': 'test@example.com', 'primary': True}]}, + {'AttributePath': 'displayName', 'AttributeValue': 'John Doe'}, + {'AttributePath': 'userType', 'AttributeValue': 'Employee'}, + {'AttributePath': 'profileUrl', 'AttributeValue': 'https://example.com/profile'}, + {'AttributePath': 'title', 'AttributeValue': 'Software Engineer'} + ] + + # Call the function to be tested + from AWSIAMIdentityCenter import get_user_operations_list + result = get_user_operations_list(args) + + # Assert that the result matches the expected list of operations + assert result == expected_operations + + +@pytest.mark.parametrize('last_data, current_data, expected_results', [ + ([ + { + "id": 1, "groups": [{ + 'GroupId': 'GROUP_1', + 'MembershipId': 'A' + }, + { + 'GroupId': 'GROUP_2', + 'MembershipId': 'B' + }]}, + { + "id": 2, "groups": [{ + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + }, + { + 'GroupId': 'GROUP_3', + 'MembershipId': 'D' + }]} + ], + [ + { + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + }, + { + 'GroupId': 'GROUP_3', + 'MembershipId': 'D' + }, + { + 'GroupId': 'GROUP_4', + 'MembershipId': 'F' + } + ], + [ + { + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + }, + { + 'GroupId': 'GROUP_3', + 'MembershipId': 'D' + }, + { + 'GroupId': 'GROUP_4', + 'MembershipId': 'F' + } + ]), + ( + { + "id": 2, "groups": [{ + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + }, + { + 'GroupId': 'GROUP_2', + 'MembershipId': 'B' + }]}, + [ + { + 
'GroupId': 'GROUP_1', + 'MembershipId': 'C' + }, + { + 'GroupId': 'GROUP_3', + 'MembershipId': 'D' + }, + { + 'GroupId': 'GROUP_4', + 'MembershipId': 'F' + } + ], + [ + { + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + }, + { + 'GroupId': 'GROUP_2', + 'MembershipId': 'B' + }, + { + 'GroupId': 'GROUP_3', + 'MembershipId': 'D' + }, + { + 'GroupId': 'GROUP_4', + 'MembershipId': 'F' + } + ] + ), + ( + [], + [ + { + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + } + ], + [ + { + 'GroupId': 'GROUP_1', + 'MembershipId': 'C' + } + ] + ) + +]) +def test_update_groups_and_memberships(mocker, last_data, current_data, expected_results): + """ + Given: + Arguments for updating groups and memberships + + When: + Updating groups and memberships using the update_groups_and_memberships function + + Then: + Verify that the correct groups and memberships are updated with the correct details + """ + + key = "id" + id_value = 2 + new_data = "groups" + + from AWSIAMIdentityCenter import update_groups_and_memberships + mocker.patch.object(AWSIAMIdentityCenter, "update_groups_and_memberships") + + updated_data = update_groups_and_memberships(last_data, current_data, key, id_value, new_data) + + assert updated_data == expected_results diff --git a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/README.md b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/README.md index b0c3cb1f3ae8..64548a9da1d9 100644 --- a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/README.md +++ b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/README.md @@ -10,20 +10,22 @@ For detailed instructions about setting up authentication, see: [AWS Integration | **Parameter** | **Description** | **Required** | | --- | --- | --- | - | Role Arn | | True | - | Role Session Name | | True | + | Role Arn | | False | + | Role Session Name | A descriptive name for the assumed role session. 
| False | | AWS Default Region | | True | | Role Session Duration | | False | - | Timeout | The time in seconds till a timeout exception is reached. You can specify just the read timeout \(for example 60\) or also the connect timeout followed after a comma \(for example 60,10\). If a connect timeout is not specified, a default of 10 second will be used. | False | + | Identity Store ID | The Identity Store ID parameter is required for API requests. It can be provided as a parameter or as an argument. If the Identity Store ID was not specified - Test failure. | False | + | Access Key | | False | + | Secret Key | | False | + | Access Key | | False | + | Secret Key | | False | + | Timeout | The time in seconds until a timeout exception is reached. You can specify just the read timeout \(for example 60\) or also the connect timeout followed after a comma \(for example 60,10\). If a connect timeout is not specified, a default of 10 seconds will be used. | False | | Retries | The maximum number of retry attempts when connection or throttling errors are encountered. Set to 0 to disable retries. The default value is 5 and the limit is 10. Note: Increasing the number of retries will increase the execution time. | False | | Trust any certificate (not secure) | | False | - | Use system proxy settings | | False | - | Identity Store ID | | True | - | Secret Key | | True | - | Access Key | | True | 4. Click **Test** to validate the URLs, token, and connection. + ## Commands You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook. @@ -43,21 +45,56 @@ Creates a new IAM Identity Center user for your AWS account. | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | userName | The username of the user to create. | Required | -| familyName | The family name of the user to create. | Optional | -| givenName | The First name of the user to create. 
| Required | -| userEmailAddress | The email address of the user to create. | Required | +| familyName | The family name of the user to create. | Required | +| givenName | The first name of the user to create. | Required | +| userEmailAddress | The email address of the user to create. | Optional | | displayName | The display name of the user to create. | Required | +| profileUrl | The profile URL of the user to create. | Optional | +| userEmailAddressPrimary | Is this the primary email address for the associated resource?. Possible values are: yes, no. | Optional | +| userType | The type of the user to create. | Optional | +| title | The title of the user to create. | Optional | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | #### Context Output | **Path** | **Type** | **Description** | | --- | --- | --- | -| AWS.IAMIdentityCenter.Users.UserId | date | The date and time, when the user was created. | +| AWS.IAMIdentityCenter.User.UserId | string | The user ID. | +| AWS.IAMIdentityCenter.User.IdentityStoreId | string | Identity Store ID. 
| + +#### Command example +```!aws-iam-identitycenter-create-user displayName="John Doe" familyName=Doe givenName=John userName=johndoe userEmailAddress=johnDoe@gmail.com``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "User": { + "IdentityStoreId": "d-9967750fbd", + "UserId": "634418e2-20c1-703e-4358-a8312472c85d" + } + } + } +} +``` + +#### Human Readable Output + +>### User johndoe has been successfully created with user id 634418e2-20c1-703e-4358-a8312472c85d +>|IdentityStoreId|UserId| +>|---|---| +>| d-9967750fbd | 634418e2-20c1-703e-4358-a8312472c85d | + + ### aws-iam-identitycenter-get-user *** -Retrieves information about the specified IAM user, including the user creation date, path, unique ID, and ARN. +Retrieves information about the specified IAM user. #### Base Command @@ -67,16 +104,86 @@ Retrieves information about the specified IAM user, including the user creation | **Argument Name** | **Description** | **Required** | | --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | | userName | The name of the user to get information about. | Required | #### Context Output | **Path** | **Type** | **Description** | | --- | --- | --- | -| AWS.IAM.IdentityCenter.Users.UserName | string | The friendly name identifying the user. | -| AWS.IAM.IdentityCenter.Users.UserId | string | The stable and unique string identifying the user. | -| AWS.IAM.IdentityCenter.Users.Email | string | The user email address. 
| -| AWS.IAM.IdentityCenter.Users.DisplayName | unknown | The user display name in AWS IAM IdentityCenter. | +| AWS.IAMIdentityCenter.User.UserName | string | The friendly name identifying the user. | +| AWS.IAMIdentityCenter.User.UserId | string | The stable and unique string identifying the user. | +| AWS.IAMIdentityCenter.User.ExternalIds.Issuer | String | The issuer for an external identifier. | +| AWS.IAMIdentityCenter.User.ExternalIds.Id | String | The identifier issued to this resource by an external identity provider. | +| AWS.IAMIdentityCenter.User.Name.Formatted | String | Formatted version of the user's name for display. | +| AWS.IAMIdentityCenter.User.Name.FamilyName | String | The family name of the user. | +| AWS.IAMIdentityCenter.User.Name.GivenName | String | The given name of the user. | +| AWS.IAMIdentityCenter.User.Name.MiddleName | String | The middle name of the user. | +| AWS.IAMIdentityCenter.User.Name.HonorificPrefix | String | The honorific prefix of the user. | +| AWS.IAMIdentityCenter.User.Name.HonorificSuffix | String | The honorific suffix of the user. | +| AWS.IAMIdentityCenter.User.DisplayName | String | The name of the user formatted for display when referenced. | +| AWS.IAMIdentityCenter.User.NickName | String | An alternate name for the user. | +| AWS.IAMIdentityCenter.User.ProfileUrl | String | URL associated with the user. | +| AWS.IAMIdentityCenter.User.Emails.Value | String | Email address associated with the user. | +| AWS.IAMIdentityCenter.User.Emails.Type | String | Type of email address. | +| AWS.IAMIdentityCenter.User.Emails.Primary | String | Indicates whether this is the primary email address. | +| AWS.IAMIdentityCenter.User.Addresses.StreetAddress | String | Street address. | +| AWS.IAMIdentityCenter.User.Addresses.Locality | String | Address locality. | +| AWS.IAMIdentityCenter.User.Addresses.Region | String | Region of the address. 
| +| AWS.IAMIdentityCenter.User.Addresses.PostalCode | String | Postal code of the address. | +| AWS.IAMIdentityCenter.User.Addresses.Country | String | Country of the address. | +| AWS.IAMIdentityCenter.User.Addresses.Formatted | String | Formatted version of the address for display. | +| AWS.IAMIdentityCenter.User.Addresses.Type | String | Type of address. | +| AWS.IAMIdentityCenter.User.Addresses.Primary | String | Indicates whether this is the primary address. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Value | String | Phone number associated with the user. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Type | String | Type of phone number. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Primary | String | Indicates whether this is the primary phone number. | +| AWS.IAMIdentityCenter.User.UserType | String | Type of user. | +| AWS.IAMIdentityCenter.User.Title | String | Title of the user. | +| AWS.IAMIdentityCenter.User.PreferredLanguage | String | Preferred language of the user. | +| AWS.IAMIdentityCenter.User.Locale | String | Geographical region or location of the user. | +| AWS.IAMIdentityCenter.User.Timezone | String | Time zone of the user. | +| AWS.IAMIdentityCenter.User.IdentityStoreId | String | Globally unique identifier for the identity store. 
| + +#### Command example +```!aws-iam-identitycenter-get-user userName=johndoe``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "User": { + "DisplayName": "John Doe", + "Emails": [ + { + "Value": "johnDoe@gmail.com" + } + ], + "IdentityStoreId": "d-9967750fbd", + "Name": { + "FamilyName": "Doe", + "GivenName": "John" + }, + "UserId": "634418e2-20c1-703e-4358-a8312472c85d", + "UserName": "johndoe" + } + } + } +} +``` + +#### Human Readable Output + +>### AWS IAM Identity Center Users +>|DisplayName|Emails|UserId|UserName| +>|---|---|---|---| +>| John Doe | johnDoe@gmail.com | 634418e2-20c1-703e-4358-a8312472c85d | johndoe | + + ### aws-iam-identitycenter-list-users @@ -91,13 +198,91 @@ Lists the IAM users, returns all users in the AWS account. | **Argument Name** | **Description** | **Required** | | --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| limit | Number of results to return. Default is 50. | Optional | +| nextToken | The pagination token. | Optional | #### Context Output | **Path** | **Type** | **Description** | | --- | --- | --- | -| AWS.IAMIdentityCenter.Users.UserName | string | The friendly name identifying the user. | -| AWS.IAMIdentityCenter.Users.UserId | string | The stable and unique string identifying the user. | +| AWS.IAMIdentityCenter.User.UserName | string | The friendly name identifying the user. | +| AWS.IAMIdentityCenter.User.UserId | string | The stable and unique string identifying the user. 
| +| AWS.IAMIdentityCenter.User.ExternalIds.Issuer | String | The issuer for an external identifier. | +| AWS.IAMIdentityCenter.User.ExternalIds.Id | String | The identifier issued to this resource by an external identity provider. | +| AWS.IAMIdentityCenter.User.Name.Formatted | String | Formatted version of the user's name for display. | +| AWS.IAMIdentityCenter.User.Name.FamilyName | String | The family name of the user. | +| AWS.IAMIdentityCenter.User.Name.GivenName | String | The given name of the user. | +| AWS.IAMIdentityCenter.User.Name.MiddleName | String | The middle name of the user. | +| AWS.IAMIdentityCenter.User.Name.HonorificPrefix | String | The honorific prefix of the user. | +| AWS.IAMIdentityCenter.User.Name.HonorificSuffix | String | The honorific suffix of the user. | +| AWS.IAMIdentityCenter.User.DisplayName | String | The name of the user formatted for display when referenced. | +| AWS.IAMIdentityCenter.User.NickName | String | An alternate name for the user. | +| AWS.IAMIdentityCenter.User.ProfileUrl | String | URL associated with the user. | +| AWS.IAMIdentityCenter.User.Emails.Value | String | Email address associated with the user. | +| AWS.IAMIdentityCenter.User.Emails.Type | String | Type of email address. | +| AWS.IAMIdentityCenter.User.Emails.Primary | String | Indicates whether this is the primary email address. | +| AWS.IAMIdentityCenter.User.Addresses.StreetAddress | String | Street address. | +| AWS.IAMIdentityCenter.User.Addresses.Locality | String | Address locality. | +| AWS.IAMIdentityCenter.User.Addresses.Region | String | Region of the address. | +| AWS.IAMIdentityCenter.User.Addresses.PostalCode | String | Postal code of the address. | +| AWS.IAMIdentityCenter.User.Addresses.Country | String | Country of the address. | +| AWS.IAMIdentityCenter.User.Addresses.Formatted | String | Formatted version of the address for display. | +| AWS.IAMIdentityCenter.User.Addresses.Type | String | Type of address. 
| +| AWS.IAMIdentityCenter.User.Addresses.Primary | String | Indicates whether this is the primary address. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Value | String | Phone number associated with the user. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Type | String | Type of phone number. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Primary | String | Indicates whether this is the primary phone number. | +| AWS.IAMIdentityCenter.User.UserType | String | Type of user. | +| AWS.IAMIdentityCenter.User.Title | String | Title of the user. | +| AWS.IAMIdentityCenter.User.PreferredLanguage | String | Preferred language of the user. | +| AWS.IAMIdentityCenter.User.Locale | String | Geographical region or location of the user. | +| AWS.IAMIdentityCenter.User.Timezone | String | Time zone of the user. | +| AWS.IAMIdentityCenter.User.IdentityStoreId | String | Globally unique identifier for the identity store. | +| AWS.IAMIdentityCenter.UserNextToken | String | Pagination token. | + +#### Command example +```!aws-iam-identitycenter-list-users``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "User": [ + { + "DisplayName": "John Doe", + "Emails": [ + { + "Value": "johnDoe@gmail.com" + } + ], + "IdentityStoreId": "d-9967750fbd", + "Name": { + "FamilyName": "Doe", + "GivenName": "John" + }, + "UserId": "8374c852-10e1-70e2-8996-5b0d54bf8ccd", + "UserName": "johndoe" + } + ], + "UserNextToken": null + } + } +} +``` + +#### Human Readable Output + +>### AWS IAM Identity Center Users +>|DisplayName|Emails|UserId|UserName| +>|---|---|---|---| +>| John Doe | johnDoe@gmail.com | 8374c852-10e1-70e2-8996-5b0d54bf8ccd | johndoe | + + ### aws-iam-identitycenter-list-groups @@ -112,18 +297,58 @@ Lists all the IAM groups in the AWS account. | **Argument Name** | **Description** | **Required** | | --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. 
Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| limit | Number of results to return. Default is 50. | Optional | +| nextToken | The pagination token. | Optional | #### Context Output | **Path** | **Type** | **Description** | | --- | --- | --- | -| AWS.IAM.IdentityCenter.Groups.GroupName | string | The friendly name that identifies the group. | -| AWS.IAM.IdentityCenter.Groups.GroupId | string | The stable and unique string identifying the group. | +| AWS.IAMIdentityCenter.Group.GroupId | String | The identifier for a group in the identity store. | +| AWS.IAMIdentityCenter.Group.DisplayName | String | The display name value for the group. | +| AWS.IAMIdentityCenter.Group.ExternalIds.Issuer | String | The issuer for an external identifier. | +| AWS.IAMIdentityCenter.Group.ExternalIds.Id | String | The identifier issued to this resource by an external identity provider. | +| AWS.IAMIdentityCenter.Group.Description | String | A description of the specified group. | +| AWS.IAMIdentityCenter.Group.IdentityStoreId | String | The globally unique identifier for the identity store. | +| AWS.IAMIdentityCenter.GroupNextToken | String | The pagination token used for the ListUsers and ListGroups API operations. 
| + +#### Command example +```!aws-iam-identitycenter-list-groups``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "Group": [ + { + "DisplayName": "new", + "GroupId": "53142802-e001-7004-9134-9e6e4e1e10c0", + "IdentityStoreId": "d-9967750fbd" + } + ], + "GroupNextToken": null + } + } +} +``` + +#### Human Readable Output + +>### AWS IAM Identity Center Groups +>|DisplayName|GroupId| +>|---|---| +>| new | 53142802-e001-7004-9134-9e6e4e1e10c0 | + ### aws-iam-identitycenter-list-groups-for-user *** -Lists the IAM groups that the specified IAM user belongs to. +Lists the IAM Identity Center groups that the specified IAM user belongs to. #### Base Command @@ -133,14 +358,53 @@ Lists the IAM groups that the specified IAM user belongs to. | **Argument Name** | **Description** | **Required** | | --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| limit | Number of results to return. Default is 50. | Optional | +| nextToken | The pagination token. | Optional | | userName | The name of the user to list groups for. | Required | #### Context Output | **Path** | **Type** | **Description** | | --- | --- | --- | -| AWS.IAM.IdentityCenter.Users.GroupMemeberships.GroupName | string | The friendly name that identifies the group. | -| AWS.IAM.IdentityCenter.Users.GroupMemeberships.GroupId | string | The stable and unique string identifying the group. | +| AWS.IAMIdentityCenter.User.UserId | string | User ID. 
| +| AWS.IAMIdentityCenter.User.GroupMemberships.MembershipId | string | The membership ID of the user in the group. | +| AWS.IAMIdentityCenter.User.GroupMemberships.GroupId | string | The stable and unique string identifying the group. | + + +#### Command example +```!aws-iam-identitycenter-list-groups-for-user userName=johndoe``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "GroupsUserNextToken":null, + "User":{ + "GroupMemberships":[ + { + "GroupId":"a3948882-5051-7090-524c-c8c850bf1919", + "MembershipId":"e374b872-9011-7000-c847-55fdcc299204" + } + ], + "UserId":"c3f438a2-e041-7033-75e8-63eb8c64b0e4" + } + } + } +} +``` + +#### Human Readable Output + +>### AWS IAM Identity Center Groups +>|GroupID|MembershipID|UserID| +>|---|---|---| +>| a3948882-5051-7090-524c-c8c850bf1919 | e374b872-9011-7000-c847-55fdcc299204 | c3f438a2-e041-7033-75e8-63eb8c64b0e4 | + ### aws-iam-identitycenter-add-user-to-group @@ -155,12 +419,22 @@ Adds the specified user to the specified group. | **Argument Name** | **Description** | **Required** | | --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | | userName | The name of the user to add. | Required | | groupName | The name of the group to update. | Required | #### Context Output There is no context output for this command. 
+#### Command example +```!aws-iam-identitycenter-add-user-to-group groupName=NewGroup userName=johndoe``` +#### Human Readable Output + +>The membership id 4314c862-b0c1-705e-d5da-ccf59fd045f3 has been successfully created. ### aws-iam-identitycenter-get-group @@ -175,51 +449,412 @@ Get AWS IAM Identity Center group Information. | **Argument Name** | **Description** | **Required** | | --- | --- | --- | -| groupName | The name of the group to search. | Required | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| displayName | The name of the group to search. | Required | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.IAMIdentityCenter.Group.GroupId | String | The identifier for a group in the identity store. | +| AWS.IAMIdentityCenter.Group.DisplayName | String | The display name of the group. | +| AWS.IAMIdentityCenter.Group.ExternalIds.Issuer | String | The issuer for an external identifier. | +| AWS.IAMIdentityCenter.Group.ExternalIds.Id | String | The identifier issued to this resource by an external identity provider. | +| AWS.IAMIdentityCenter.Group.Description | String | A description of the group. | +| AWS.IAMIdentityCenter.Group.IdentityStoreId | String | The globally unique identifier for the identity store. 
| + +#### Command example +```!aws-iam-identitycenter-get-group displayName=NewGroup``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "Group": { + "Description": "New", + "DisplayName": "NewGroup", + "GroupId": "f3a478d2-50b1-7078-81a4-c97c703007f3", + "IdentityStoreId": "d-9967750fbd" + } + } + } +} +``` + +#### Human Readable Output + +>### AWS IAM Identity Center Groups +>|DisplayName|GroupId| +>|---|---| +>| NewGroup | f3a478d2-50b1-7078-81a4-c97c703007f3 | + + +### aws-iam-identitycenter-get-user-by-email + +*** +Retrieves information about the specified IAM user. + +#### Base Command + +`aws-iam-identitycenter-get-user-by-email` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| emailAddress | The email of the user. | Required | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.IAMIdentityCenter.User.UserName | string | The friendly name identifying the user. | +| AWS.IAMIdentityCenter.User.UserId | string | The stable and unique string identifying the user. | +| AWS.IAMIdentityCenter.User.ExternalIds.Issuer | String | The issuer for an external identifier. | +| AWS.IAMIdentityCenter.User.ExternalIds.Id | String | The identifier issued to this resource by an external identity provider. | +| AWS.IAMIdentityCenter.User.Name.Formatted | String | Formatted version of the user's name for display. 
| +| AWS.IAMIdentityCenter.User.Name.FamilyName | String | The family name of the user. | +| AWS.IAMIdentityCenter.User.Name.GivenName | String | The given name of the user. | +| AWS.IAMIdentityCenter.User.Name.MiddleName | String | The middle name of the user. | +| AWS.IAMIdentityCenter.User.Name.HonorificPrefix | String | The honorific prefix of the user. | +| AWS.IAMIdentityCenter.User.Name.HonorificSuffix | String | The honorific suffix of the user. | +| AWS.IAMIdentityCenter.User.DisplayName | String | The name of the user formatted for display when referenced. | +| AWS.IAMIdentityCenter.User.NickName | String | An alternate name for the user. | +| AWS.IAMIdentityCenter.User.ProfileUrl | String | URL associated with the user. | +| AWS.IAMIdentityCenter.User.Emails.Value | String | Email address associated with the user. | +| AWS.IAMIdentityCenter.User.Emails.Type | String | Type of email address. | +| AWS.IAMIdentityCenter.User.Emails.Primary | String | Indicates whether this is the primary email address. | +| AWS.IAMIdentityCenter.User.Addresses.StreetAddress | String | Street address. | +| AWS.IAMIdentityCenter.User.Addresses.Locality | String | Address locality. | +| AWS.IAMIdentityCenter.User.Addresses.Region | String | Region of the address. | +| AWS.IAMIdentityCenter.User.Addresses.PostalCode | String | Postal code of the address. | +| AWS.IAMIdentityCenter.User.Addresses.Country | String | Country of the address. | +| AWS.IAMIdentityCenter.User.Addresses.Formatted | String | Formatted version of the address for display. | +| AWS.IAMIdentityCenter.User.Addresses.Type | String | Type of address. | +| AWS.IAMIdentityCenter.User.Addresses.Primary | String | Indicates whether this is the primary address. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Value | String | Phone number associated with the user. | +| AWS.IAMIdentityCenter.User.PhoneNumbers.Type | String | Type of phone number. 
| +| AWS.IAMIdentityCenter.User.PhoneNumbers.Primary | String | Indicates whether this is the primary phone number. | +| AWS.IAMIdentityCenter.User.UserType | String | Type of user. | +| AWS.IAMIdentityCenter.User.Title | String | Title of the user. | +| AWS.IAMIdentityCenter.User.PreferredLanguage | String | Preferred language of the user. | +| AWS.IAMIdentityCenter.User.Locale | String | Geographical region or location of the user. | +| AWS.IAMIdentityCenter.User.Timezone | String | Time zone of the user. | +| AWS.IAMIdentityCenter.User.IdentityStoreId | String | Globally unique identifier for the identity store. | + + +#### Command example +```!aws-iam-identitycenter-get-user-by-email emailAddress=johnDoe@gmail.com``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "User": { + "DisplayName": "John Doe", + "Emails": [ + { + "Primary": true, + "Type": "work", + "Value": "johnDoe@gmail.com" + } + ], + "IdentityStoreId": "d-9967750fbd", + "Name": { + "FamilyName": "Doe", + "GivenName": "John" + }, + "UserId": "13746842-e011-70fe-14fe-600d496510f0", + "UserName": "johndoe" + } + } + } +} +``` + +#### Human Readable Output + +>### AWS IAM Identity Center Users +>|DisplayName|Emails|UserId|UserName| +>|---|---|---|---| +>| John Doe | johnDoe@gmail.com | 13746842-e011-70fe-14fe-600d496510f0 | johndoe | + +### aws-iam-identitycenter-list-memberships + +*** +Lists the memberships of the group. + +#### Base Command + +`aws-iam-identitycenter-list-memberships` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. 
| Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| limit | Number of results to return. Default is 50. | Optional | +| nextToken | The pagination token. | Optional | +| groupName | The name of the group to list the memberships. | Required | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| AWS.IAMIdentityCenter.GroupMemberships.IdentityStoreId | String | The globally unique identifier for the identity store. | +| AWS.IAMIdentityCenter.Group.GroupMemberships.MembershipId | String | The identifier for a GroupMembership object in an identity store. | +| AWS.IAMIdentityCenter.Group.GroupId | String | The identifier for a group in the identity store. | +| AWS.IAMIdentityCenter.Group.GroupMemberships.UserId | String | Identifier of resources that can be members. | +| AWS.IAMIdentityCenter.GroupMembershipNextToken | String | The pagination token. | + + +#### Command example +```!aws-iam-identitycenter-list-memberships groupName=NewGroup``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "Group": { + "GroupMemberships":[ + { + "MembershipId":"e374b872-9011-7000-c847-55fdcc299204", + "UserId":"c3f438a2-e041-7033-75e8-63eb8c64b0e4" + } + ] + }, + "GroupMembershipNextToken":null + } + } +} +``` + +#### Human Readable Output + +>|GroupId|MembershipId|UserId| +>|---|---|---| +>| a3948882-5051-7090-524c-c8c850bf1919 | e374b872-9011-7000-c847-55fdcc299204 | c3f438a2-e041-7033-75e8-63eb8c64b0e4 | + + +### aws-iam-identitycenter-delete-user + +*** +Removes the specified user from the AWS IAM Identity Center. + +#### Base Command + +`aws-iam-identitycenter-delete-user` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. 
Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| userName | The name of the user to remove. | Required | #### Context Output There is no context output for this command. +#### Command example +```!aws-iam-identitycenter-delete-user userName=johndoe``` +#### Human Readable Output -### aws-iam-identitycenter-remove-user-from-all-groups +>The User 634418e2-20c1-703e-4358-a8312472c85d has been removed. + +### aws-iam-identitycenter-delete-group *** -This will remove the entered user from all groups/memberships. +Removes the specified group from the IAM Identity Center. #### Base Command -`aws-iam-identitycenter-remove-user-from-all-groups` +`aws-iam-identitycenter-delete-group` #### Input | **Argument Name** | **Description** | **Required** | | --- | --- | --- | -| userName | Username that will be removed from all groups. | Required | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| groupName | The name of the group to remove. | Required | #### Context Output There is no context output for this command. 
+#### Command example +```!aws-iam-identitycenter-delete-group groupName=NewGroup``` +#### Human Readable Output -### aws-iam-identitycenter-get-user-by-email +>The Group f3a478d2-50b1-7078-81a4-c97c703007f3 has been removed. + +### aws-iam-identitycenter-create-group *** -This will get user information using email address. +Creates a new IAM Identity Center group for your AWS account. #### Base Command -`aws-iam-identitycenter-get-user-by-email` +`aws-iam-identitycenter-create-group` #### Input | **Argument Name** | **Description** | **Required** | | --- | --- | --- | -| emailAddress | The email of the user to be removed. | Optional | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| displayName | The name of the group to create. | Required | +| description | The description of the group to create. | Optional | #### Context Output | **Path** | **Type** | **Description** | | --- | --- | --- | -| AWS.IAM.IdentityCenter.Users.UserName | string | The friendly name identifying the user. | -| AWS.IAM.IdentityCenter.Users.Email | string | The email address identifying the user. | -| AWS.IAM.IdentityCenter.Users.UserId | string | The user ID of the queried user. | -| AWS.IAM.IdentityCenter.Users.DisplayName | string | The display name of the queried user. | +| AWS.IAMIdentityCenter.Group.GroupId | string | The user ID. | +| AWS.IAMIdentityCenter.Group.IdentityStoreId | string | Identity store ID. 
| + +#### Command example +```!aws-iam-identitycenter-create-group description=New displayName=NewGroup``` +#### Context Example +```json +{ + "AWS": { + "IAMIdentityCenter": { + "Group": { + "GroupId": "f3a478d2-50b1-7078-81a4-c97c703007f3", + "IdentityStoreId": "d-9967750fbd" + } + } + } +} +``` + +#### Human Readable Output + +>### Group NewGroup has been successfully created with id f3a478d2-50b1-7078-81a4-c97c703007f3 +>|GroupId|IdentityStoreId| +>|---|---| +>| f3a478d2-50b1-7078-81a4-c97c703007f3 | d-9967750fbd | + + +### aws-iam-identitycenter-update-group + +*** +Updates an IAM Identity Center group for your AWS account. + +#### Base Command + +`aws-iam-identitycenter-update-group` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| displayName | The name of the group to update. | Required | +| description | The description of the group to update. | Required | + +#### Context Output + +There is no context output for this command. +#### Command example +```!aws-iam-identitycenter-update-group description=changed displayName=NewGroup``` +#### Human Readable Output + +>Group NewGroup has been successfully updated + +### aws-iam-identitycenter-update-user + +*** +Updates an IAM Identity Center user for your AWS account. 
+ +#### Base Command + +`aws-iam-identitycenter-update-user` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| userName | The username of the user to update. | Required | +| familyName | The family name of the user to update. | Optional | +| givenName | The first name of the user to update. | Optional | +| userEmailAddressPrimary | Is this the primary email address for the associated resource. Possible values are: yes, no. | Optional | +| userEmailAddress | The email address of the user to update. | Optional | +| displayName | The display name of the user to update. | Optional | +| profileUrl | The profile URL of the user to update. | Optional | +| userType | The type of the user to update. | Optional | +| title | The title of the user to update. | Optional | +| region | The AWS Region. If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | + +#### Context Output + +There is no context output for this command. + + +#### Command example +```!aws-iam-identitycenter-update-user userName=johndoe familyName=changed``` + +#### Human Readable Output + +>User johndoe has been successfully updated + +### aws-iam-identitycenter-delete-group-membership + +*** +Deletes a user from all groups if a username is provided, or deletes multiple memberships if a list of memberships is provided. + +#### Base Command + +`aws-iam-identitycenter-delete-group-membership` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| region | The AWS Region. 
If not specified, the default region will be used. Possible values are: us-east-1, us-east-2, us-west-1, us-west-2, ca-central-1, eu-west-1, eu-central-1, eu-west-2, ap-northeast-1, ap-northeast-2, ap-southeast-1, ap-southeast-2, ap-south-1, sa-east-1, eu-north-1, eu-west-3. | Optional | +| roleArn | The Amazon Resource Name. | Optional | +| roleSessionDuration | Role session duration. | Optional | +| roleSessionName | Role session name. | Optional | +| IdentityStoreId | Identity store ID. | Optional | +| userName | The name of the user to delete from all groups. | Optional | +| membershipId | Comma-separated list of membership IDs to delete. | Optional | + +#### Context Output + +There is no context output for this command. +#### Command example +```!aws-iam-identitycenter-delete-group-membership userName=johndoe``` +#### Human Readable Output + +>User is not member of any group. \ No newline at end of file diff --git a/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/command_examples.txt b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/command_examples.txt new file mode 100644 index 000000000000..4334217eec69 --- /dev/null +++ b/Packs/AWS-IAMIdentityCenter/Integrations/AWSIAMIdentityCenter/command_examples.txt @@ -0,0 +1,15 @@ +!aws-iam-identitycenter-create-group description=New displayName=NewGroup +!aws-iam-identitycenter-create-user displayName=example familyName=fam givenName=example userName=exampleName userEmailAddress=test@example.com +!aws-iam-identitycenter-get-group displayName=NewGroup +!aws-iam-identitycenter-update-group description=changed displayName=NewGroup +!aws-iam-identitycenter-update-user userName=example familyName=changed +!aws-iam-identitycenter-get-user userName=exampleName +!aws-iam-identitycenter-add-user-to-group groupName=NewGroup userName=exampleName +!aws-iam-identitycenter-delete-group groupName=NewGroup +!aws-iam-identitycenter-delete-group-membership userName=exampleName 
+!aws-iam-identitycenter-delete-user userName=exampleName +!aws-iam-identitycenter-get-user-by-email emailAddress=test@example.com +!aws-iam-identitycenter-list-users +!aws-iam-identitycenter-list-groups +!aws-iam-identitycenter-list-groups-for-user userName=exampleName +!aws-iam-identitycenter-list-memberships groupName=NewGroup diff --git a/Packs/AWS-IAMIdentityCenter/ReleaseNotes/1_0_2.json b/Packs/AWS-IAMIdentityCenter/ReleaseNotes/1_0_2.json new file mode 100644 index 000000000000..3aed6f494ecc --- /dev/null +++ b/Packs/AWS-IAMIdentityCenter/ReleaseNotes/1_0_2.json @@ -0,0 +1,4 @@ +{ + "breakingChanges": true, + "breakingChangesNotes": "Adopted integration to XSOAR, resulting in significant breaking changes. See the release notes for more information." +} \ No newline at end of file diff --git a/Packs/AWS-IAMIdentityCenter/ReleaseNotes/1_0_2.md b/Packs/AWS-IAMIdentityCenter/ReleaseNotes/1_0_2.md new file mode 100644 index 000000000000..ca9a80568f37 --- /dev/null +++ b/Packs/AWS-IAMIdentityCenter/ReleaseNotes/1_0_2.md @@ -0,0 +1,77 @@ + +#### Integrations + +##### AWS - IAM Identity Center + +- Updated the context path prefix to be `AWS.IAMIdentityCenter` instead of `AWS.IAM`. +- Created a new playbook. +- Added 6 commands: + - ***aws-iam-identitycenter-delete-user*** + - ***aws-iam-identitycenter-create-group*** + - ***aws-iam-identitycenter-delete-group*** + - ***aws-iam-identitycenter-list-memberships*** + - ***aws-iam-identitycenter-update-user*** + - ***aws-iam-identitycenter-update-group*** +- Added support for the following arguments in the ***aws-iam-identitycenter-create-user***, ***aws-iam-identitycenter-list-users***, ***aws-iam-identitycenter-get-user***, ***aws-iam-identitycenter-get-user-by-email***, ***aws-iam-identitycenter-list-groups***,***aws-iam-identitycenter-get-group***, ***aws-iam-identitycenter-add-user-to-group***, ***aws-iam-identitycenter-list-groups-for-user*** and ***aws-iam-identitycenter-delete-group-membership*** commands. 
+ - *identityStoreId* + - *region* + - *roleArn* + - *roleSessionDuration* + - *roleSessionName* +- Added the full API responses to the context data in the following commands: + - ***aws-iam-identitycenter-create-user*** + - ***aws-iam-identitycenter-list-users*** + - ***aws-iam-identitycenter-get-user*** + - ***aws-iam-identitycenter-get-user-by-email*** + - ***aws-iam-identitycenter-list-groups*** + - ***aws-iam-identitycenter-get-group*** + - ***aws-iam-identitycenter-list-groups-for-user*** +- Updated the context path to be `AWS.IAMIdentityCenter.User` in the following commands: + - ***aws-iam-identitycenter-create-user*** + - ***aws-iam-identitycenter-list-users*** + - ***aws-iam-identitycenter-get-user*** + - ***aws-iam-identitycenter-get-user-by-email*** + - ***aws-iam-identitycenter-list-groups-for-user*** +- Updated the context path to be `AWS.IAMIdentityCenter.Group` in the following commands: + - ***aws-iam-identitycenter-list-groups*** + - ***aws-iam-identitycenter-get-group*** +- Deprecated the ResponseMetadata from context data and human readable in the following commands: + - ***aws-iam-identitycenter-create-user*** + - ***aws-iam-identitycenter-get-user*** + - ***aws-iam-identitycenter-create-group*** + - ***aws-iam-identitycenter-get-group*** +- Added support for the following arguments in the ***aws-iam-identitycenter-list-users***, ***aws-iam-identitycenter-list-groups*** and ***aws-iam-identitycenter-list-groups-for-user*** commands. + - *limit* + - *nextToken* +- Added support for the following arguments in the ***aws-iam-identitycenter-create-user*** command. + - *userType* + - *profileUrl* + - *title* + - *region* +- Added the *UserNextToken* argument to the context data in path `AWS.IAMIdentityCenter` in the ***aws-iam-identitycenter-list-users*** command. +- Added the *GroupNextToken* argument to the context data in path `AWS.IAMIdentityCenter` in the ***aws-iam-identitycenter-list-groups*** command. 
+- Added the *GroupsUserNextToken* argument to the context data in path `AWS.IAMIdentityCenter.User` in the ***aws-iam-identitycenter-list-groups-for-user*** command. +- Deprecated the *groupName* argument and replaced it with the *displayName* argument in the ***aws-iam-identitycenter-get-group*** command. +- Updated the following parameters to not required. + - *identityStoreId* + - *roleArn* + - *roleSessionName* + - *accessKey* + - *secretKey* +- Added the *credentials* parameter. +- Added the *roleArnCredentials* parameter. +- Deprecated the ***aws-iam-identitycenter-remove-user-from-all-groups*** command and replaced it with the ***aws-iam-identitycenter-delete-group-membership*** command. +- Added the *membershipId* argument to the ***aws-iam-identitycenter-delete-group-membership*** command. +- Updated the *emailAddress* argument to not required in the ***aws-iam-identitycenter-get-user-by-email*** command. + + + + + + +- Updated the Docker image to: *demisto/boto3py3:1.0.0.94100*. 
+ +##### AWS - IAM Identity Center + +- + diff --git a/Packs/AWS-IAMIdentityCenter/TestPlaybooks/AWS-IAMIdentityCenter-Test.yml b/Packs/AWS-IAMIdentityCenter/TestPlaybooks/AWS-IAMIdentityCenter-Test.yml new file mode 100644 index 000000000000..a32094e5f4af --- /dev/null +++ b/Packs/AWS-IAMIdentityCenter/TestPlaybooks/AWS-IAMIdentityCenter-Test.yml @@ -0,0 +1,511 @@ +id: AWS-IAMIdentityCenter-Test +version: -1 +name: AWS-IAMIdentityCenter-Test +starttaskid: "0" +tasks: + "0": + id: "0" + taskid: e68d5257-dde6-4738-8461-926b3f675503 + type: start + task: + id: e68d5257-dde6-4738-8461-926b3f675503 + version: -1 + name: "" + iscommand: false + brand: "" + description: '' + nexttasks: + '#none#': + - "1" + - "2" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 450, + "y": 50 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "1": + id: "1" + taskid: a130e17b-50f6-4a05-80a2-5f7040656031 + type: regular + task: + id: a130e17b-50f6-4a05-80a2-5f7040656031 + version: -1 + name: Create User + description: Creates a new IAM Identity Center user for your AWS account. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-create-user + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "9" + scriptarguments: + displayName: + simple: test + familyName: + simple: testFam + givenName: + simple: test + userEmailAddress: + simple: test@example.com + userName: + simple: test + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 650, + "y": 220 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "2": + id: "2" + taskid: 6713ff9f-33f1-4bbe-8060-a12cfd156979 + type: regular + task: + id: 6713ff9f-33f1-4bbe-8060-a12cfd156979 + version: -1 + name: Create Group + description: Creates a new IAM Identity Center group for your AWS account. + script: AWS - IAM Identity Center|||aws-iam-identitycenter-create-group + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "6" + scriptarguments: + description: + simple: new + displayName: + simple: GroupName + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 230, + "y": 220 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "4": + id: "4" + taskid: caa168bc-b412-4d7f-8c21-a4fd44b0e58d + type: regular + task: + id: caa168bc-b412-4d7f-8c21-a4fd44b0e58d + version: -1 + name: Get User + description: Retrieves information about the specified IAM user, including the user creation date, path, unique ID, and ARN. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-get-user + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "5" + scriptarguments: + userName: + simple: test + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 650, + "y": 520 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "5": + id: "5" + taskid: 098dddb7-a148-46f6-8907-3cd2211e97d0 + type: regular + task: + id: 098dddb7-a148-46f6-8907-3cd2211e97d0 + version: -1 + name: Add User To Group + description: Adds the specified user to the specified group. + script: AWS - IAM Identity Center|||aws-iam-identitycenter-add-user-to-group + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "7" + - "8" + scriptarguments: + groupName: + simple: ${AWS.IAMIdentityCenter.Group.DisplayName} + userName: + simple: ${AWS.IAMIdentityCenter.User.UserName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 440, + "y": 680 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "6": + id: "6" + taskid: 8d4dca5f-6d8f-4b38-82d2-fc8e3db6e898 + type: regular + task: + id: 8d4dca5f-6d8f-4b38-82d2-fc8e3db6e898 + version: -1 + name: Get Group + description: Get AWS IAM Identity Center group Information. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-get-group + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "5" + scriptarguments: + displayName: + simple: GroupName + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 230, + "y": 370 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "7": + id: "7" + taskid: 8f11f8eb-a3cd-406d-8a1b-d0e465dccff0 + type: regular + task: + id: 8f11f8eb-a3cd-406d-8a1b-d0e465dccff0 + version: -1 + name: List Groups For User + description: Lists the IAM Identity Center groups that the specified IAM user belongs to. + script: AWS - IAM Identity Center|||aws-iam-identitycenter-list-groups-for-user + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "12" + scriptarguments: + userName: + simple: ${AWS.IAMIdentityCenter.User.UserName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 660, + "y": 850 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "8": + id: "8" + taskid: a4790ee6-e42f-4034-88fb-4884ee424983 + type: regular + task: + id: a4790ee6-e42f-4034-88fb-4884ee424983 + version: -1 + name: List Memberships + description: Lists the memberships of the group. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-list-memberships + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "12" + scriptarguments: + groupName: + simple: ${AWS.IAMIdentityCenter.Group.DisplayName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 250, + "y": 850 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "9": + id: "9" + taskid: 1eb53488-dfe9-4f38-8377-e37265933b83 + type: regular + task: + id: 1eb53488-dfe9-4f38-8377-e37265933b83 + version: -1 + name: Get User By Email + script: AWS - IAM Identity Center|||aws-iam-identitycenter-get-user-by-email + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "4" + scriptarguments: + emailAddress: + simple: test@example.com + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 650, + "y": 370 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "10": + id: "10" + taskid: d777736d-5e32-4882-8ed6-41375039c764 + type: regular + task: + id: d777736d-5e32-4882-8ed6-41375039c764 + version: -1 + name: List Users + description: Lists the IAM users, returns all users in the AWS account. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-list-users + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "11" + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 440, + "y": 1630 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "11": + id: "11" + taskid: 4b35bd89-3e1c-489f-8a38-605442138b02 + type: regular + task: + id: 4b35bd89-3e1c-489f-8a38-605442138b02 + version: -1 + name: List Groups + description: Lists all the IAM groups in the AWS account. + script: AWS - IAM Identity Center|||aws-iam-identitycenter-list-groups + type: regular + iscommand: true + brand: AWS - IAM Identity Center + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 440, + "y": 1830 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "12": + id: "12" + taskid: 4cef1817-d067-424b-852f-c8c6b4d92a37 + type: regular + task: + id: 4cef1817-d067-424b-852f-c8c6b4d92a37 + version: -1 + name: Delete Group Membership + description: Deletes a user from all groups if a username is provided, or deletes multiple memberships if a list of memberships is provided. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-delete-group-membership + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "14" + scriptarguments: + userName: + simple: ${AWS.IAMIdentityCenter.User.UserName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 440, + "y": 1065 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "13": + id: "13" + taskid: d848ea88-8605-47f2-8081-a60447e0384c + type: regular + task: + id: d848ea88-8605-47f2-8081-a60447e0384c + version: -1 + name: Delete Group + description: Removes the entered group. + script: AWS - IAM Identity Center|||aws-iam-identitycenter-delete-group + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "10" + scriptarguments: + groupName: + simple: ${AWS.IAMIdentityCenter.Group.DisplayName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 440, + "y": 1435 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "14": + id: "14" + taskid: 74865ca6-0986-41d5-8641-b44f9aec00ae + type: regular + task: + id: 74865ca6-0986-41d5-8641-b44f9aec00ae + version: -1 + name: Delete user + description: Removes the entered user. 
+ script: AWS - IAM Identity Center|||aws-iam-identitycenter-delete-user + type: regular + iscommand: true + brand: AWS - IAM Identity Center + nexttasks: + '#none#': + - "13" + scriptarguments: + userName: + simple: ${AWS.IAMIdentityCenter.User.UserName} + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 440, + "y": 1250 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false +view: |- + { + "linkLabelsPosition": {}, + "paper": { + "dimensions": { + "height": 1875, + "width": 810, + "x": 230, + "y": 50 + } + } + } +inputs: [] +outputs: [] +quiet: true +description: If the creation of the user or group fails, please ensure to manually delete any existing users or groups that were not properly added. +fromversion: 6.10.0 diff --git a/Packs/AWS-IAMIdentityCenter/pack_metadata.json b/Packs/AWS-IAMIdentityCenter/pack_metadata.json index 5d34dff3e511..5b2ef2d89f4b 100644 --- a/Packs/AWS-IAMIdentityCenter/pack_metadata.json +++ b/Packs/AWS-IAMIdentityCenter/pack_metadata.json @@ -1,9 +1,9 @@ { "name": "AWS - IAM Identity Center", - "description": "AWS IAM Identity Center\n\nAuthor: Sameh El-Hakim\n\nWith AWS IAM Identity Center (successor to AWS Single Sign-On), you can manage sign-in security for your workforce identities, also known as workforce users. IAM Identity Center provides one place where you can create or connect workforce users and manage their access centrally across all their AWS accounts and applications. 
IAM Identity Center is the recommended approach for workforce authentication and authorization in AWS, for organizations of any size and type.", - "support": "community", - "currentVersion": "1.0.1", - "author": "Sameh El-Hakim", + "description": "AWS IAM Identity Center\n\nWith AWS IAM Identity Center (successor to AWS Single Sign-On), you can manage sign-in security for your workforce identities, also known as workforce users. IAM Identity Center provides one place where you can create or connect workforce users and manage their access centrally across all their AWS accounts and applications. IAM Identity Center is the recommended approach for workforce authentication and authorization in AWS, for organizations of any size and type.", + "support": "xsoar", + "currentVersion": "1.0.2", + "author": "Cortex XSOAR", "url": "", "email": "", "created": "2023-07-27T10:46:39Z", @@ -16,8 +16,5 @@ "marketplaces": [ "xsoar", "marketplacev2" - ], - "githubUser": [ - "sepaprivate" ] } \ No newline at end of file diff --git a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py index 0ece4385446d..a624dfdace91 100755 --- a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py +++ b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.py @@ -1052,10 +1052,8 @@ def main(): # pragma: no cover command=demisto.command())) if demisto.command() == 'test-module': # This is the call made when pressing the integration test button. 
- client = aws_session() - response = client.REPLACE_WITH_TEST_FUNCTION() - if response['ResponseMetadata']['HTTPStatusCode'] == 200: - demisto.results('ok') + response = list_firewalls_command(args) + demisto.results('ok') elif demisto.command() == 'aws-network-firewall-associate-firewall-policy': human_readable, outputs, response = associate_firewall_policy_command( diff --git a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml index b87ebc972560..5f66c6c341a6 100755 --- a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml +++ b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/AWS-NetworkFirewall.yml @@ -1521,7 +1521,7 @@ script: - contextPath: AWS-NetworkFirewall.SubnetChangeProtection description: A setting indicating whether the firewall is protected against changes to the subnet associations. Use this setting to protect against accidentally modifying the subnet associations for a firewall that is in use. When you create a firewall, the operation initializes this setting to TRUE. 
type: Unknown - dockerimage: demisto/boto3py3:1.0.0.87655 + dockerimage: demisto/boto3py3:1.0.0.95377 runonce: false script: '-' subtype: python3 diff --git a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/command_examples.txt b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/command_examples.txt index 63a9bcf78d4a..528a7c5ec533 100644 --- a/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/command_examples.txt +++ b/Packs/AWS-NetworkFirewall/Integrations/AWS-NetworkFirewall/command_examples.txt @@ -1,29 +1,29 @@ !aws-network-firewall-create-rule-group type=STATELESS rule_group_name=example-group-stateless capacity=10 rule_group_json="""{"RulesSource":{"StatelessRulesAndCustomActions":{"StatelessRules":[{"RuleDefinition":{"MatchAttributes":{"Sources":[{"AddressDefinition":"10.0.0.0/8"},{"AddressDefinition":"192.168.0.0/16"},{"AddressDefinition":"172.31.0.0/16"}]},"Actions":["aws:pass"]},"Priority":5}]}}}""" -!aws-network-firewall-create-firewall-policy firewall_policy_name=example-fw-policy firewall_policy_json="""{"StatelessRuleGroupReferences":[{"ResourceArn":"arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless","Priority":100}],"StatelessDefaultActions":["aws:pass"],"StatelessFragmentDefaultActions":["aws:pass"]}""" -!aws-network-firewall-create-firewall firewall_name=myfirewall firewall_policy_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall-policy/example-fw-policy subnet_mappings_subnet_ids=subnet-1787526f,subnet-901becda vpc_id=vpc-a77d05df -!aws-network-firewall-associate-firewall-policy firewall_policy_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall-policy/example-fw-policy firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall -!aws-network-firewall-associate-subnets subnet_mappings_subnet_ids=subnet-1787526f,subnet-901becda firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 
+!aws-network-firewall-create-firewall-policy firewall_policy_name=example-fw-policy firewall_policy_json="""{"StatelessRuleGroupReferences":[{"ResourceArn":"arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless","Priority":100}],"StatelessDefaultActions":["aws:pass"],"StatelessFragmentDefaultActions":["aws:pass"]}""" +!aws-network-firewall-create-firewall firewall_name=myfirewall firewall_policy_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall-policy/example-fw-policy subnet_mappings_subnet_ids=subnet-1787526f,subnet-901becda vpc_id=vpc-a77d05df +!aws-network-firewall-associate-firewall-policy firewall_policy_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall-policy/example-fw-policy firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall +!aws-network-firewall-associate-subnets subnet_mappings_subnet_ids=subnet-1787526f,subnet-901becda firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 !aws-network-firewall-disassociate-subnets firewall_name=myfirewall2 subnet_ids=subnet-901becda !aws-network-firewall-describe-firewall firewall_name=myfirewall2 -!aws-network-firewall-describe-firewall-policy firewall_policy_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall-policy/example-fw-policy2 +!aws-network-firewall-describe-firewall-policy firewall_policy_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall-policy/example-fw-policy2 !aws-network-firewall-describe-logging-configuration firewall_name=myfirewall2 -!aws-network-firewall-describe-rule-group rule_group_arn=arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless2 +!aws-network-firewall-describe-rule-group rule_group_arn=arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless2 !aws-network-firewall-list-firewall-policies !aws-network-firewall-list-firewalls !aws-network-firewall-list-rule-groups 
-!aws-network-firewall-tag-resource resource_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 tags="key=testkey,value=testvalue" -!aws-network-firewall-list-tags-for-resource resource_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 -!aws-network-firewall-untag-resource resource_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 tag_keys=testkey -!aws-network-firewall-update-firewall-delete-protection delete_protection=False firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 -!aws-network-firewall-update-firewall-description firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 description="some description" -!aws-network-firewall-update-firewall-policy-change-protection firewall_policy_change_protection=False firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 -!aws-network-firewall-update-subnet-change-protection subnet_change_protection=False firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 -!aws-network-firewall-update-firewall-policy update_token=4fa9513c-d33b-4980-8b6e-f00ef7a2af99 firewall_policy_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall-policy/example-fw-policy2 firewall_policy_json="""{"StatelessRuleGroupReferences":[{"ResourceArn":"arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless","Priority":100}],"StatelessDefaultActions":["aws:pass"],"StatelessFragmentDefaultActions":["aws:pass"]}""" -!aws-network-firewall-update-rule-group rule_group_arn=arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless2 update_token=049b2760-7b8d-4eb0-a2b1-89012f073da2 
rule_group_json="""{"RulesSource":{"StatelessRulesAndCustomActions":{"StatelessRules":[{"RuleDefinition":{"MatchAttributes":{"Sources":[{"AddressDefinition":"10.0.0.0/8"},{"AddressDefinition":"192.168.0.0/16"},{"AddressDefinition":"172.31.0.0/16"}]},"Actions":["aws:pass"]},"Priority":5}]}}}""" -!aws-network-firewall-update-logging-configuration firewall_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall/myfirewall2 logging_configuration_json="{\"LogDestinationConfigs\":[{\"LogType\":\"ALERT\",\"LogDestinationType\":\"S3\",\"LogDestination\":{\"bucketName\":\"xsoar-demo-test-bucket-network-firewall\",\"prefix\":\"alerts\"}}]}" -!aws-network-firewall-put-resource-policy resource_arn=arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless2 policy="""{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["632480965664"]},"Action":["network-firewall:CreateFirewallPolicy","network-firewall:UpdateFirewallPolicy","network-firewall:ListRuleGroups"],"Resource":"arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless2"}]}""" -!aws-network-firewall-describe-resource-policy resource_arn=arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless2 -!aws-network-firewall-delete-resource-policy resource_arn=arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless2 +!aws-network-firewall-tag-resource resource_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 tags="key=testkey,value=testvalue" +!aws-network-firewall-list-tags-for-resource resource_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 +!aws-network-firewall-untag-resource resource_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 tag_keys=testkey +!aws-network-firewall-update-firewall-delete-protection delete_protection=False 
firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 +!aws-network-firewall-update-firewall-description firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 description="some description" +!aws-network-firewall-update-firewall-policy-change-protection firewall_policy_change_protection=False firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 +!aws-network-firewall-update-subnet-change-protection subnet_change_protection=False firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 +!aws-network-firewall-update-firewall-policy update_token=4fa9513c-d33b-4980-8b6e-f00ef7a2af99 firewall_policy_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall-policy/example-fw-policy2 firewall_policy_json="""{"StatelessRuleGroupReferences":[{"ResourceArn":"arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless","Priority":100}],"StatelessDefaultActions":["aws:pass"],"StatelessFragmentDefaultActions":["aws:pass"]}""" +!aws-network-firewall-update-rule-group rule_group_arn=arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless2 update_token=049b2760-7b8d-4eb0-a2b1-89012f073da2 rule_group_json="""{"RulesSource":{"StatelessRulesAndCustomActions":{"StatelessRules":[{"RuleDefinition":{"MatchAttributes":{"Sources":[{"AddressDefinition":"10.0.0.0/8"},{"AddressDefinition":"192.168.0.0/16"},{"AddressDefinition":"172.31.0.0/16"}]},"Actions":["aws:pass"]},"Priority":5}]}}}""" +!aws-network-firewall-update-logging-configuration firewall_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall/myfirewall2 logging_configuration_json="{\"LogDestinationConfigs\":[{\"LogType\":\"ALERT\",\"LogDestinationType\":\"S3\",\"LogDestination\":{\"bucketName\":\"xsoar-demo-test-bucket-network-firewall\",\"prefix\":\"alerts\"}}]}" +!aws-network-firewall-put-resource-policy 
resource_arn=arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless2 policy="""{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":["123456789012"]},"Action":["network-firewall:CreateFirewallPolicy","network-firewall:UpdateFirewallPolicy","network-firewall:ListRuleGroups"],"Resource":"arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless2"}]}""" +!aws-network-firewall-describe-resource-policy resource_arn=arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless2 +!aws-network-firewall-delete-resource-policy resource_arn=arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless2 !aws-network-firewall-delete-firewall firewall_name=myfirewall -!aws-network-firewall-delete-firewall-policy firewall_policy_arn=arn:aws:network-firewall:us-west-2:632480965664:firewall-policy/example-fw-policy2 -!aws-network-firewall-delete-rule-group rule_group_arn=arn:aws:network-firewall:us-west-2:632480965664:stateless-rulegroup/example-group-stateless3 +!aws-network-firewall-delete-firewall-policy firewall_policy_arn=arn:aws:network-firewall:us-west-2:123456789012:firewall-policy/example-fw-policy2 +!aws-network-firewall-delete-rule-group rule_group_arn=arn:aws:network-firewall:us-west-2:123456789012:stateless-rulegroup/example-group-stateless3 diff --git a/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_7.md b/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_7.md new file mode 100644 index 000000000000..11706bd0df2c --- /dev/null +++ b/Packs/AWS-NetworkFirewall/ReleaseNotes/1_0_7.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### AWS Network Firewall +- Updated the Docker image to: *demisto/boto3py3:1.0.0.95377*. +- Fixed an issue where test connection was failing. 
diff --git a/Packs/AWS-NetworkFirewall/pack_metadata.json b/Packs/AWS-NetworkFirewall/pack_metadata.json index cf8701dd5fbd..c89f169fe11c 100644 --- a/Packs/AWS-NetworkFirewall/pack_metadata.json +++ b/Packs/AWS-NetworkFirewall/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AWS - Network Firewall", "description": "Amazon Web Services Network Firewall", "support": "xsoar", - "currentVersion": "1.0.6", + "currentVersion": "1.0.7", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AWS-Route53/ReleaseNotes/1_1_33.md b/Packs/AWS-Route53/ReleaseNotes/1_1_33.md new file mode 100644 index 000000000000..847d3668bdca --- /dev/null +++ b/Packs/AWS-Route53/ReleaseNotes/1_1_33.md @@ -0,0 +1,3 @@ +## AWS - Route53 + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. diff --git a/Packs/AWS-Route53/pack_metadata.json b/Packs/AWS-Route53/pack_metadata.json index 05c0d819ada8..13e225c7b4c1 100644 --- a/Packs/AWS-Route53/pack_metadata.json +++ b/Packs/AWS-Route53/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AWS - Route53", "description": "Amazon Web Services Managed Cloud DNS Service.", "support": "xsoar", - "currentVersion": "1.1.32", + "currentVersion": "1.1.33", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AWS-S3/ReleaseNotes/1_2_23.md b/Packs/AWS-S3/ReleaseNotes/1_2_23.md new file mode 100644 index 000000000000..b530392bde3a --- /dev/null +++ b/Packs/AWS-S3/ReleaseNotes/1_2_23.md @@ -0,0 +1,3 @@ +## AWS - S3 + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. 
diff --git a/Packs/AWS-S3/pack_metadata.json b/Packs/AWS-S3/pack_metadata.json index 1f25fc369627..cc2db1a8eb35 100644 --- a/Packs/AWS-S3/pack_metadata.json +++ b/Packs/AWS-S3/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AWS - S3", "description": "Amazon Web Services Simple Storage Service (S3)", "support": "xsoar", - "currentVersion": "1.2.22", + "currentVersion": "1.2.23", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml index 2d1b3de7c118..8516532da9b4 100644 --- a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml +++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/AWSSNSListener.yml @@ -14,7 +14,7 @@ configuration: section: Connect advanced: true required: false -- additionalinfo: "Runs the service on this port from within Cortex XSOAR. Requires a unique port for each long-running integration instance. Do not use the same port for multiple instances. Note: If you click the test button more than once, a failure may occur mistakenly indicating that the port is already in use. (For Cortex XSOAR 8 and Cortex XSIAM) If you do not enter a Listen Port, an unused port for AWS SNS Listener will automatically be generated when the instance is saved. However, if using an engine, you must enter a Listen Port." +- additionalinfo: "Runs the service on this port from within Cortex XSOAR. Requires a unique port for each long-running integration instance. Do not use the same port for multiple instances. Note: If you click the test button more than once, a failure may occur mistakenly indicating that the port is already in use." 
display: Listen Port name: longRunningPort type: 0 @@ -61,7 +61,7 @@ display: AWS-SNS-Listener name: AWS-SNS-Listener script: commands: [] - dockerimage: demisto/fastapi:1.0.0.93128 + dockerimage: demisto/fastapi:0.110.3.93571 longRunning: true longRunningPort: true script: '-' diff --git a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md index 2b79ac2ad334..ef72ff9a8afe 100644 --- a/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md +++ b/Packs/AWS-SNS-Listener/Integrations/AWSSNSListener/README.md @@ -10,7 +10,7 @@ This integration was integrated and tested with version January 2024 of AWS-SNS- | **Parameter** | **Description** | **Required** | | --- | --- | --- | | Long running instance | Integration is long running by default| | - | Listen Port | Runs the service on this port from within Cortex XSOAR. Requires a unique port for each long-running integration instance. Do not use the same port for multiple instances. Note: If you click the test button more than once, a failure may occur mistakenly indicating that the port is already in use. \(For Cortex XSOAR 8 and Cortex XSIAM\) If you do not enter a Listen Port, an unused port for AWS SNS Listener will automatically be generated when the instance is saved. However, if using an engine, you must enter a Listen Port. | False | + | Listen Port | Runs the service on this port from within Cortex XSOAR. Requires a unique port for each long-running integration instance. Do not use the same port for multiple instances. Note: If you click the test button more than once, a failure may occur mistakenly indicating that the port is already in use. | False | | Username | Uses basic authentication for accessing the list. If empty, no authentication is enforced. \(For Cortex XSOAR 8 and Cortex XSIAM\) Optional for engines, otherwise mandatory. | False | | Password | | False | | Endpoint | Set the endpoint of your listener. 
example: /snsv2 | False | @@ -21,7 +21,8 @@ This integration was integrated and tested with version January 2024 of AWS-SNS- 4. Click **Test** to validate the URLs, token, and connection. + ## Commands You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook. -After you successfully execute a command, a DBot message appears in the War Room with the command details. +After you successfully execute a command, a DBot message appears in the War Room with the command details. \ No newline at end of file diff --git a/Packs/AWS-SNS-Listener/ReleaseNotes/1_0_3.md b/Packs/AWS-SNS-Listener/ReleaseNotes/1_0_3.md new file mode 100644 index 000000000000..f30354bbc1ae --- /dev/null +++ b/Packs/AWS-SNS-Listener/ReleaseNotes/1_0_3.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### AWS-SNS-Listener + +- Updated the Docker image to: *demisto/fastapi:0.110.3.93571*. +- Documentation and metadata improvements. diff --git a/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml b/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml index ebeef18db1ac..05dac34ba55e 100644 --- a/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml +++ b/Packs/AWS-SNS-Listener/TestPlaybooks/AWS_SNS_Listener_-_Test.yml @@ -51,11 +51,11 @@ tasks: scriptarguments: body: simple: '{"Type": "Notification", "MessageId": "8b2d1fb3-991d-5620-94db-4122378cde65", - "TopicArn": "arn:aws:sns:eu-central-1:794065701450:demisto", "Subject": + "TopicArn": "arn:aws:sns:eu-central-1:123456789012:demisto", "Subject": "SNS-test-subject", "Message": "SNS-test-subject", "Timestamp": "2024-04-11T09:24:33.550Z", "SignatureVersion": "1", "Signature": 
"URJNiNbbC3YJAiAL8UGzyTfbwTEh1yKgx1uPdLQ7lPRt5UmQhlgjCsuh76Tjl6IB9bTHrqsWznC6yKYXEABNMnJxpMx8EssnofmmTH+5sEB5nd/yz9U8qMCPP22mSmkGtkAAFwxi0fBo9Pg3mzhitO2l7yo3v//V4KVQu03ggbSqTXnLZNXpN3sAjgrKFfye8x9OJ0NGMdDWrnFOEYMbJEGgetkxjJV/H7L+BCz01W1V+xjPFzbfmWFIyKArjEv7cI2dytQBdIf9QhB7zOYIpKPNz/NMp0a+3e/hxt2rarBLx/sqii5yWDCT/jAVAWBNyv6ZiGQwScWgjuu8ICnzDQ==", "SigningCertURL": "https://sns.eu-central-1.amazonaws.com/SimpleNotificationService-60eadc530605d63b8e62a523676ef735.pem", - "UnsubscribeURL": "https://sns.eu-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:eu-central-1:794065701450:demisto:81273f59-b8ce-423b-842c-31cc19b81cb9"}' + "UnsubscribeURL": "https://sns.eu-central-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=arn:aws:sns:eu-central-1:123456789012:demisto:81273f59-b8ce-423b-842c-31cc19b81cb9"}' headers: simple: x-amz-sns-message-type:Notification method: @@ -366,8 +366,8 @@ tasks: task: id: 571fff3d-701c-4697-87b4-5645267da22c version: -1 - name: GetServerInfo_copy - scriptName: GetServerInfo_copy + name: GetServerInfo + scriptName: GetServerInfo type: regular iscommand: false brand: "" diff --git a/Packs/AWS-SNS-Listener/TestPlaybooks/script-GetWebhookUrl.yml b/Packs/AWS-SNS-Listener/TestPlaybooks/script-GetWebhookUrl.yml deleted file mode 100644 index fe6035fe7199..000000000000 --- a/Packs/AWS-SNS-Listener/TestPlaybooks/script-GetWebhookUrl.yml +++ /dev/null @@ -1,20 +0,0 @@ -commonfields: - id: GetServerInfo_copy - version: -1 -name: GetServerInfo_copy -script: >- - urls = demisto.demistoUrls() - - outputs = {'urls' : urls, 'version': demisto.demistoVersion()} - - command_results = CommandResults(outputs_prefix='ServerInfo',outputs=outputs) - - return_results(command_results) -type: python -tags: [] -enabled: true -scripttarget: 0 -subtype: python3 -runonce: false -dockerimage: demisto/python3:3.10.14.92207 -fromversion: 5.0.0 diff --git a/Packs/AWS-SNS-Listener/pack_metadata.json b/Packs/AWS-SNS-Listener/pack_metadata.json index 
1c7cb2e27e4d..f5ace54c2535 100644 --- a/Packs/AWS-SNS-Listener/pack_metadata.json +++ b/Packs/AWS-SNS-Listener/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AWS-SNS-Listener", "description": "A long running AWS SNS Listener service that can subscribe to an SNS topic and create incidents from the messages received.", "support": "xsoar", - "currentVersion": "1.0.2", + "currentVersion": "1.0.3", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.py b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.py index 09b6361f95e5..80142c761085 100644 --- a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.py +++ b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/AWSSecurityHubEventCollector.py @@ -113,7 +113,7 @@ def get_events(client: "SecurityHubClient", start_time: dt.datetime | None = Non result = [event for event in result if event['Id'] not in id_ignore_set] count += len(result) - yield result + yield result # type: ignore if 'NextToken' in response and (limit == 0 or count < limit): kwargs['NextToken'] = response['NextToken'] diff --git a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md index 4051699216e5..2772c6e1fd86 100644 --- a/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md +++ b/Packs/AWS-SecurityHub/Integrations/AWSSecurityHubEventCollector/README.md @@ -1,5 +1,9 @@ An XSIAM event collector for AWS Security Hub. +<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure AWS Security Hub Event Collector on Cortex XSIAM 1. Navigate to **Settings** > **Configurations** > **Data Collection** > **Automations & Feed Integrations**. 
diff --git a/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/README.md b/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/README.md index 997f2d883dad..db2b9a709913 100644 --- a/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/README.md +++ b/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/README.md @@ -161,7 +161,7 @@ Used by Security Hub customers to update information about their investigation i ##### Command Example -```!aws-securityhub-batch-update-findings finding_identifiers_id='arn:aws:securityhub:eu-west-1:676921422616:subscription/aws-foundational-security-best-practices/v/1.0.0/S3.1/finding/a2ee641f-aec2-4356-a1b6-656cce03be4e' finding_identifiers_product_arn='arn:aws:securityhub:eu-west-1::product/aws/securityhub' note_text=test note_updated_by=Demisto``` +```!aws-securityhub-batch-update-findings finding_identifiers_id='arn:aws:securityhub:eu-west-1:123456789012:subscription/aws-foundational-security-best-practices/v/1.0.0/S3.1/finding/a2ee641f-aec2-4356-a1b6-656cce03be4e' finding_identifiers_product_arn='arn:aws:securityhub:eu-west-1::product/aws/securityhub' note_text=test note_updated_by=Demisto``` ##### Context Example ``` @@ -174,7 +174,7 @@ Used by Security Hub customers to update information about their investigation i ### AWS SecurityHub BatchUpdateFindings |ProcessedFindings|UnprocessedFindings| |---|---| -| | {'FindingIdentifier': {'Id': "'arn:aws:securityhub:eu-west-1:676921422616:subscription/aws-foundational-security-best-practices/v/1.0.0/S3.1/finding/a2ee641f-aec2-4356-a1b6-656cce03be4e'", 'ProductArn': "'arn:aws:securityhub:eu-west-1::product/aws/securityhub'"}, 'ErrorCode': 'FindingNotFound', 'ErrorMessage': 'Finding Not Found'} | +| | {'FindingIdentifier': {'Id': "'arn:aws:securityhub:eu-west-1:123456789012:subscription/aws-foundational-security-best-practices/v/1.0.0/S3.1/finding/a2ee641f-aec2-4356-a1b6-656cce03be4e'", 'ProductArn': "'arn:aws:securityhub:eu-west-1::product/aws/securityhub'"}, 'ErrorCode': 'FindingNotFound', 
'ErrorMessage': 'Finding Not Found'} | ### 3. aws-securityhub-enable-security-hub @@ -612,14 +612,14 @@ Returns a list of findings that match the specified criteria. "ProductArn": "arn:aws:securityhub:eu-west-1::product/aws/securityhub", "ProductFields": { "aws/securityhub/SeverityLabel": "MEDIUM", - "StandardsGuideSubscriptionArn": "arn:aws:securityhub:eu-west-1:676921422616:subscription/cis-aws-foundations-benchmark/v/1.2.0", + "StandardsGuideSubscriptionArn": "arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0", "RecommendationUrl": "https://docs.aws.amazon.com/console/securityhub/standards-cis-1.8/remediation", "RuleId": "1.8", "RelatedAWSResources:0/name": "securityhub-iam-password-policy-number-check-a08618e1", - "StandardsControlArn": "arn:aws:securityhub:eu-west-1:676921422616:control/cis-aws-foundations-benchmark/v/1.2.0/1.8", + "StandardsControlArn": "arn:aws:securityhub:eu-west-1:123456789012:control/cis-aws-foundations-benchmark/v/1.2.0/1.8", "RelatedAWSResources:0/type": "AWS::Config::ConfigRule", "aws/securityhub/ProductName": "Security Hub", - "aws/securityhub/FindingId": "arn:aws:securityhub:eu-west-1::product/aws/securityhub/arn:aws:securityhub:eu-west-1:676921422616:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6", + "aws/securityhub/FindingId": "arn:aws:securityhub:eu-west-1::product/aws/securityhub/arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6", "aws/securityhub/annotation": "Unable to describe the supporting AWS Config Rule, Please verify that you have enabled AWS Config.", "StandardsGuideArn": "arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0", "aws/securityhub/CompanyName": "AWS" @@ -630,7 +630,7 @@ Returns a list of findings that match the specified criteria. 
"Region": "eu-west-1", "Partition": "aws", "Type": "AwsAccount", - "Id": "AWS::::Account:676921422616" + "Id": "AWS::::Account:123456789012" } ], "Types": [ @@ -650,9 +650,9 @@ Returns a list of findings that match the specified criteria. "Original": "MEDIUM", "Label": "MEDIUM" }, - "Id": "arn:aws:securityhub:eu-west-1:676921422616:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6", + "Id": "arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6", "CreatedAt": "2020-07-05T13:14:29.111Z", - "AwsAccountId": "676921422616" + "AwsAccountId": "123456789012" } ] } @@ -662,7 +662,7 @@ Returns a list of findings that match the specified criteria. ### AWS SecurityHub GetFindings |AwsAccountId|Compliance|CreatedAt|Description|FirstObservedAt|GeneratorId|Id|LastObservedAt|ProductArn|ProductFields|RecordState|Remediation|Resources|SchemaVersion|Severity|Title|Types|UpdatedAt|Workflow|WorkflowState| |---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---| -| 676921422616 | Status: WARNING,StatusReasons: {'ReasonCode': 'CONFIG_ACCESS_DENIED', 'Description': 'Unable to describe the supporting AWS Config Rule, Please verify that you have enabled AWS Config.'} | 2020-07-05T13:14:29.111Z | Password policies are, in part, used to enforce password complexity requirements. IAM password policies can be used to ensure passwords are comprised of different character sets. It is recommended that the password policy require at least one number. 
| 2020-07-05T13:14:29.111Z | arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.8 | arn:aws:securityhub:eu-west-1:676921422616:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6 | 2020-07-22T11:30:13.952Z | arn:aws:securityhub:eu-west-1::product/aws/securityhub | StandardsGuideArn: arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0,StandardsGuideSubscriptionArn: arn:aws:securityhub:eu-west-1:676921422616:subscription/cis-aws-foundations-benchmark/v/1.2.0,RuleId: 1.8,RecommendationUrl: https://docs.aws.amazon.com/console/securityhub/standards-cis-1.8/remediation,RelatedAWSResources:0/name: securityhub-iam-password-policy-number-check-a08618e1,RelatedAWSResources:0/type: AWS::Config::ConfigRule,StandardsControlArn: arn:aws:securityhub:eu-west-1:676921422616:control/cis-aws-foundations-benchmark/v/1.2.0/1.8,aws/securityhub/SeverityLabel: MEDIUM,aws/securityhub/ProductName: Security Hub,aws/securityhub/CompanyName: AWS,aws/securityhub/annotation: Unable to describe the supporting AWS Config Rule, Please verify that you have enabled AWS Config.,aws/securityhub/FindingId: arn:aws:securityhub:eu-west-1::product/aws/securityhub/arn:aws:securityhub:eu-west-1:676921422616:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6 | ACTIVE | Recommendation: {"Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", "Url": "https://docs.aws.amazon.com/console/securityhub/standards-cis-1.8/remediation"} | {'Type': 'AwsAccount', 'Id': 'AWS::::Account:676921422616', 'Partition': 'aws', 'Region': 'eu-west-1'} | 2018-10-08 | Product: 40,Label: MEDIUM,Normalized: 40,Original: MEDIUM | 1.8 Ensure IAM password policy requires at least one number | Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark | 2020-07-22T11:28:46.637Z | Status: NEW | NEW | +| 123456789012 
| Status: WARNING,StatusReasons: {'ReasonCode': 'CONFIG_ACCESS_DENIED', 'Description': 'Unable to describe the supporting AWS Config Rule, Please verify that you have enabled AWS Config.'} | 2020-07-05T13:14:29.111Z | Password policies are, in part, used to enforce password complexity requirements. IAM password policies can be used to ensure passwords are comprised of different character sets. It is recommended that the password policy require at least one number. | 2020-07-05T13:14:29.111Z | arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0/rule/1.8 | arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6 | 2020-07-22T11:30:13.952Z | arn:aws:securityhub:eu-west-1::product/aws/securityhub | StandardsGuideArn: arn:aws:securityhub:::ruleset/cis-aws-foundations-benchmark/v/1.2.0,StandardsGuideSubscriptionArn: arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0,RuleId: 1.8,RecommendationUrl: https://docs.aws.amazon.com/console/securityhub/standards-cis-1.8/remediation,RelatedAWSResources:0/name: securityhub-iam-password-policy-number-check-a08618e1,RelatedAWSResources:0/type: AWS::Config::ConfigRule,StandardsControlArn: arn:aws:securityhub:eu-west-1:123456789012:control/cis-aws-foundations-benchmark/v/1.2.0/1.8,aws/securityhub/SeverityLabel: MEDIUM,aws/securityhub/ProductName: Security Hub,aws/securityhub/CompanyName: AWS,aws/securityhub/annotation: Unable to describe the supporting AWS Config Rule, Please verify that you have enabled AWS Config.,aws/securityhub/FindingId: arn:aws:securityhub:eu-west-1::product/aws/securityhub/arn:aws:securityhub:eu-west-1:123456789012:subscription/cis-aws-foundations-benchmark/v/1.2.0/1.8/finding/d1d15683-7fbd-4b82-8eed-3af50785cdf6 | ACTIVE | Recommendation: {"Text": "For directions on how to fix this issue, please consult the AWS Security Hub CIS documentation.", "Url": 
"https://docs.aws.amazon.com/console/securityhub/standards-cis-1.8/remediation"} | {'Type': 'AwsAccount', 'Id': 'AWS::::Account:123456789012', 'Partition': 'aws', 'Region': 'eu-west-1'} | 2018-10-08 | Product: 40,Label: MEDIUM,Normalized: 40,Original: MEDIUM | 1.8 Ensure IAM password policy requires at least one number | Software and Configuration Checks/Industry and Regulatory Standards/CIS AWS Foundations Benchmark | 2020-07-22T11:28:46.637Z | Status: NEW | NEW | ### 5. aws-securityhub-get-master-account --- diff --git a/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/commands.txt b/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/commands.txt index 5df97c56f112..dff3683fc64a 100644 --- a/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/commands.txt +++ b/Packs/AWS-SecurityHub/Integrations/AWS_SecurityHub/commands.txt @@ -1,4 +1,4 @@ -!aws-securityhub-batch-update-findings finding_identifiers_id='arn:aws:securityhub:eu-west-1:676921422616:subscription/aws-foundational-security-best-practices/v/1.0.0/S3.1/finding/a2ee641f-aec2-4356-a1b6-656cce03be4e' finding_identifiers_product_arn='arn:aws:securityhub:eu-west-1::product/aws/securityhub' note_text=test note_updated_by=Demisto +!aws-securityhub-batch-update-findings finding_identifiers_id='arn:aws:securityhub:eu-west-1:123456789012:subscription/aws-foundational-security-best-practices/v/1.0.0/S3.1/finding/a2ee641f-aec2-4356-a1b6-656cce03be4e' finding_identifiers_product_arn='arn:aws:securityhub:eu-west-1::product/aws/securityhub' note_text=test note_updated_by=Demisto !aws-securityhub-disable-security-hub !aws-securityhub-enable-security-hub !aws-securityhub-get-findings diff --git a/Packs/AWS-SecurityHub/pack_metadata.json b/Packs/AWS-SecurityHub/pack_metadata.json index 5220455d3a56..2bd7bc38249c 100644 --- a/Packs/AWS-SecurityHub/pack_metadata.json +++ b/Packs/AWS-SecurityHub/pack_metadata.json @@ -21,5 +21,6 @@ ], "itemPrefix": [ "AWS Security Hub" - ] + ], + "defaultDataSource": "AWS Security Hub Event 
Collector" } \ No newline at end of file diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json index fd468e25eb31..e554232bc1f7 100644 --- a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_context/get_query_results_command.json @@ -7,7 +7,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -20,9 +20,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, 
additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -34,7 +34,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - 
"resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -47,9 +47,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347}", + 
"unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2347}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -61,7 +61,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, 
type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -74,9 +74,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 
GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -88,7 +88,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -101,9 +101,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, 
readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1317}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231104", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -115,7 +115,7 @@ "actor": "{user={type=AWSService, name=null, 
uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -128,9 +128,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, 
requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1199}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231104", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -142,7 +142,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", 
- "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -155,9 +155,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, 
additionalEventData.bytesTransferredIn=1556}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1556}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231104", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -169,7 +169,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, 
account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -182,9 +182,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, 
additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1580}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -196,7 +196,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=cloudtrail.amazonaws.com}", "src_endpoint": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, 
{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -209,9 +209,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276}", + "unmapped": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2276}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -223,7 +223,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", "src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -236,9 +236,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, 
readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" }, @@ -250,7 +250,7 @@ "actor": "{user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}}", "http_request": "{user_agent=s3.amazonaws.com}", 
"src_endpoint": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}", - "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]", + "resources": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]", "class_name": "API Activity", "class_uid": "3005", "category_name": "Audit Activity", @@ -263,9 +263,9 @@ "type_name": "API Activity: Update", "status": "Success", "status_id": "1", - "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960}", + "unmapped": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, 
additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=960}", "region": "eu-central-1", - "accountid": "654338056632", + "accountid": "123456789012", "eventday": "20231107", "query_execution_id": "b3c194e7-6580-421c-81fa-4b409e1ba04f" } diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt index dadfbb63bbcd..5dbbc3075aa7 100644 --- a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt +++ b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/expected_hr/get_query_results_command.txt @@ -1,13 +1,13 @@ ### AWS Athena Query Results |accountid|activity_id|activity_name|actor|api|category_name|category_uid|class_name|class_uid|cloud|eventday|http_request|metadata|query_execution_id|region|resources|severity|severity_id|src_endpoint|status|status_id|time|type_name|type_uid|unmapped| |---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---| -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, 
message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342808000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | 
{response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342833000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, 
additionalEventData.bytesTransferredIn=2347} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342772000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, 
managementEvent=false, additionalEventData.bytesTransferredIn=480} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137065000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, 
requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137170000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, 
eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1199} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137247000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, 
additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, 
account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342905000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | 
[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342908000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 
20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342917000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480} | -| 654338056632 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 
20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342948000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5AV2YZSR7D9DFDW8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 
20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=7c33bcd3-0252-4b28-b2c7-7f38ed881796, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342808000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BRW2SJ5SBFD7T91W}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, 
provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=69d54eee-2c1c-4f51-b89f-45c7029695c6, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342833000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2347} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, 
issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=55TVMR6HYD2ABTWB}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=2a0b2b1a-bff6-4a89-93e5-801e3fdf3b92, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342772000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, 
issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=5E1S778MRDJEDVSR}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=fee096f2-611d-4b3c-b092-ea06e8a527ca, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137065000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, 
responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1317} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=VDKRDR9XNA8H2MF8}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=68481d66-4a3e-42fe-a878-a1e6a7176c16, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137170000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, 
sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1199} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=PN8HM14HVKG8ED12}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231104 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=6a5583e7-0463-439a-89c6-5431d3cd58fe, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699137247000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, 
additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1556} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=J3HPX1A1NNHGKFKH}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=e3cc1c86-950f-4201-a362-3f8f68631a83, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342905000 | API 
Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1580} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=cloudtrail.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=0J1R23EBRVY6T7ZJ}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 20231107 | {user_agent=cloudtrail.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=85ca9b31-7607-40bd-8f65-0e6f892550a5, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | 
[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=null, ip=null, domain=cloudtrail.amazonaws.com} | Success | 1 | 1699342908000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2276} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=BW3N96A9B3H9MVBS}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 
20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=a7a07526-e890-4697-85a5-3d9cf4ab1e9b, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342917000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480} | +| 123456789012 | 3 | Update | {user={type=AWSService, name=null, uid=null, uuid=null, account_uid=null, credential_uid=null}, session={created_time=null, mfa=null, issuer=null}, invoked_by=s3.amazonaws.com, idp={name=null}} | {response={error=null, message=null}, operation=PutObject, version=null, service={name=s3.amazonaws.com}, request={uid=8A48J06NSTRZZH41}} | Audit Activity | 3 | API Activity | 3005 | {region=eu-central-1, provider=AWS} | 
20231107 | {user_agent=s3.amazonaws.com} | {product={version=1.09, name=CloudTrail, vendor_name=AWS, feature={name=Data}}, uid=8f60fe7d-1b50-42f5-956c-cd0f60fc98ed, profiles=[cloud], version=1.0.0-rc.2} | b3c194e7-6580-421c-81fa-4b409e1ba04f | eu-central-1 | [{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}] | Informational | 1 | {uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com} | Success | 1 | 1699342948000 | API Activity: Update | 300503 | {additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date="Wed, 15 Nov 2023 00:00:00 GMT", rule-id="Delete-older-the-7-days", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=960} | diff --git a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json index 25b9d0eb82d4..40b02f2f4452 100644 --- a/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json +++ 
b/Packs/AWS-SecurityLake/Integrations/AWSSecurityLake/test_data/raw_data_mock/get_query_results.json @@ -112,7 +112,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-40-08-CD1B1BC0934C71D4, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -153,13 +153,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=CSwJV1zFxiBSHjrTGaIUH5FMMVcgl05W, 
additionalEventData.x-amz-id-2=71IAMvFJ3O5bJRBlpJCB3l0B8CzNy5sG7BECw2I1R4oHyvkV2FKixZqmYAir4Y5h5ldtxWA+xlk=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=6ffdc2dd-05ca-483e-8baa-842949fddced, requestParameters.key=2023-11-07-07-40-08-CD1B1BC0934C71D4, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -191,7 +191,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -232,13 +232,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, 
additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/07/654338056632_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2347}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=CKIV9mfKLdZHOitE2UvDr21Km4whgr92dar3i5Ew4/upKPfXc97MP45lpxnGo1mhPW7RQgvuQEs=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=cd4f5d26-4491-40fc-a7d9-cb10f3e99ed3, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/07/123456789012_CloudTrail_eu-central-1_20231107T0740Z_dimNGWjAynOPPS1e.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2347}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + 
"VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -270,7 +270,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-39-32-43DC7FEEAE7DFCA6, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -311,13 +311,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=LXBDlV4KJJB56OUOvwPNlb1v4Re5gpKH, 
additionalEventData.x-amz-id-2=0/+WsTcJgOlNKdHK/L2FmlK7IyBHUCeZDaXhRKtYAlWrPy0oTMVVtX41yxCh3HE5s2YMLFpwyGc=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=ae9c5147-998a-4a83-ae76-dd1916795763, requestParameters.key=2023-11-07-07-39-32-43DC7FEEAE7DFCA6, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -349,7 +349,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -390,13 +390,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, 
additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1317}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=RITtRiXX1DGn4aCJ1AguFwc0Ux/HS6LSgIjiGlJBFdeFWMwmqQk1TlibDKq5kIA2xrSZc/qyl8w=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=150e3c90-ce64-4311-b9de-67ef7077751a, requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2230Z_Ei3HUNUHdxFCuo9l.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1317}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + 
"VarCharValue": "123456789012" }, { "VarCharValue": "20231104" @@ -428,7 +428,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -469,13 +469,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, 
managementEvent=false, additionalEventData.bytesTransferredIn=1199}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=p+d+4x2c5Cq78w5YiikRKYtXsRxTFKFiklo3uPsWV9VkYTbLqxQxlKSOvJ/3pyFMV0ghlX4jSiw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=f7f916ab-0dd4-4e99-b522-b415b9c80458, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/04/668688824393_CloudTrail_eu-central-1_20231104T2230Z_cvjIkDeGdNDHOgZs.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1199}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231104" @@ -507,7 +507,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, account_uid=null, 
type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -548,13 +548,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, requestParameters.key=AWSLogs/o-re4vuxlksb/654338056632/CloudTrail/eu-central-1/2023/11/04/654338056632_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1556}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=xm6fkjpc6B9awVdyl1jxnkfp+1boyZ3slsj3MtybxFfeJ+fhC84Il8k2jKEVlK91DdoRzM+RVIw=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=083a753d-7aae-4dcb-b5d9-703cfab873b4, 
requestParameters.key=AWSLogs/o-re4vuxlksb/123456789012/CloudTrail/eu-central-1/2023/11/04/123456789012_CloudTrail_eu-central-1_20231104T2235Z_yvpVSHH3613gs2AK.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1556}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231104" @@ -586,7 +586,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -627,13 +627,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, 
requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=1580}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=o5fB4Haj65lh3PSSxZ3GImGDkSUUQ2Vy0qEH2kifNcpzIn9KWAL4VECS2HMMq8abRt7X9q1X3W9r0tioo1ytzQ==, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=b1991000-4fff-4444-a70e-c615aae8db9e, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_jtWHnUtmZvFb95ZI.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=1580}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -665,7 +665,7 @@ "VarCharValue": "{uid=null, ip=null, domain=cloudtrail.amazonaws.com}" }, { - "VarCharValue": 
"[{uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-654338056632-cloudlake, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake/AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::aws-cloudtrail-logs-123456789012-cloudlake, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -706,13 +706,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-654338056632-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-654338056632-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=2276}" + "VarCharValue": "{additionalEventData.SSEApplied=SSE_S3, requestParameters.x-amz-acl=bucket-owner-full-control, 
additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, additionalEventData.x-amz-id-2=iypPsK3E1swmRKTBokcowNGxPCNWvgnjXmMyOOvajrl8bGqbQVTmPUbXGLOYR2z553KaKn/HZF0=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=aws-cloudtrail-logs-123456789012-cloudlake.s3.eu-central-1.amazonaws.com, requestParameters.x-amz-server-side-encryption=AES256, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=d0089b3a-0175-4732-8726-aa8b83557c19, requestParameters.key=AWSLogs/o-re4vuxlksb/668688824393/CloudTrail/eu-central-1/2023/11/07/668688824393_CloudTrail_eu-central-1_20231107T0740Z_xvmpLIGhOzmt4n3B.json.gz, requestParameters.bucketName=aws-cloudtrail-logs-123456789012-cloudlake, responseElements.x-amz-server-side-encryption=AES256, recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=2276}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -744,7 +744,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-41-57-110CCCE05A2BEE37, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -785,13 +785,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, 
additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=480}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=193ieSvoNFv1o.tUJKgchRloNHbERbcu, additionalEventData.x-amz-id-2=jIV/CP92IrpLpnNRe2zwJj9+c9Rg3EsUyM2AIRSYR19hm8Umi4gnDdP9NrDJszZO4EkOgWiJQz8=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=80ec7a41-6396-453a-8ec9-993c46b371be, requestParameters.key=2023-11-07-07-41-57-110CCCE05A2BEE37, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=480}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" @@ -823,7 +823,7 @@ "VarCharValue": "{uid=vpce-59a25a30, ip=null, domain=s3.amazonaws.com}" }, { - "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, 
account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=654338056632, type=AWS::S3::Bucket}]" + "VarCharValue": "[{uid=arn:aws:s3:::test-log-collector/2023-11-07-07-42-28-FC0CDE158967D4CF, account_uid=null, type=AWS::S3::Object}, {uid=arn:aws:s3:::test-log-collector, account_uid=123456789012, type=AWS::S3::Bucket}]" }, { "VarCharValue": "API Activity" @@ -864,13 +864,13 @@ }, {}, { - "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=654338056632, managementEvent=false, additionalEventData.bytesTransferredIn=960}" + "VarCharValue": "{additionalEventData.SSEApplied=Default_SSE_S3, additionalEventData.SignatureVersion=SigV4, additionalEventData.CipherSuite=ECDHE-RSA-AES128-GCM-SHA256, additionalEventData.bytesTransferredOut=0, responseElements.x-amz-version-id=IBZgLYMsVU50YUHYOt9rQ1R28oTy_rHd, additionalEventData.x-amz-id-2=TCnuNhf35xx4buXCo34P8TdlAV4hkDit07I8iUuqpjcyvWOXRGuPAuFGd+5tbmPRXN5+MbY57zU=, readOnly=false, eventType=AwsApiCall, requestParameters.Host=s3.eu-central-1.amazonaws.com, additionalEventData.AuthenticationMethod=AuthHeader, sharedEventID=93494c71-ec89-4b9a-8607-6f8ff7460ff9, 
requestParameters.key=2023-11-07-07-42-28-FC0CDE158967D4CF, requestParameters.bucketName=test-log-collector, responseElements.x-amz-server-side-encryption=AES256, responseElements.x-amz-expiration=expiry-date=\"Wed, 15 Nov 2023 00:00:00 GMT\", rule-id=\"Delete-older-the-7-days\", recipientAccountId=123456789012, managementEvent=false, additionalEventData.bytesTransferredIn=960}" }, { "VarCharValue": "eu-central-1" }, { - "VarCharValue": "654338056632" + "VarCharValue": "123456789012" }, { "VarCharValue": "20231107" diff --git a/Packs/AWS_DynamoDB/TestPlaybooks/playbook-AWS-DynamoDB-Test.yml b/Packs/AWS_DynamoDB/TestPlaybooks/playbook-AWS-DynamoDB-Test.yml index 1eed54bfdcc9..9fa453a32bf6 100644 --- a/Packs/AWS_DynamoDB/TestPlaybooks/playbook-AWS-DynamoDB-Test.yml +++ b/Packs/AWS_DynamoDB/TestPlaybooks/playbook-AWS-DynamoDB-Test.yml @@ -173,7 +173,7 @@ tasks: - "8" scriptarguments: backup_arn: - simple: arn:aws:dynamodb:eu-central-1:120785635586:table/Music/backup/01618149183237-23ebc580 + simple: arn:aws:dynamodb:eu-central-1:123456789012:table/Music/backup/01618149183237-23ebc580 raw_json: {} region: {} roleArn: {} @@ -435,7 +435,7 @@ tasks: raw_json: {} region: {} resource_arn: - simple: arn:aws:dynamodb:eu-central-1:120785635586:table/Music + simple: arn:aws:dynamodb:eu-central-1:123456789012:table/Music roleArn: {} roleSessionDuration: {} roleSessionName: {} @@ -480,7 +480,7 @@ tasks: raw_json: {} region: {} resource_arn: - simple: arn:aws:dynamodb:eu-central-1:120785635586:table/Music + simple: arn:aws:dynamodb:eu-central-1:123456789012:table/Music roleArn: {} roleSessionDuration: {} roleSessionName: {} @@ -522,7 +522,7 @@ tasks: raw_json: {} region: {} resource_arn: - simple: arn:aws:dynamodb:eu-central-1:120785635586:table/Music + simple: arn:aws:dynamodb:eu-central-1:123456789012:table/Music roleArn: {} roleSessionDuration: {} roleSessionName: {} @@ -679,7 +679,7 @@ tasks: raw_json: {} region: {} resource_arn: - simple: 
arn:aws:dynamodb:eu-central-1:120785635586:table/Music + simple: arn:aws:dynamodb:eu-central-1:123456789012:table/Music roleArn: {} roleSessionDuration: {} roleSessionName: {} diff --git a/Packs/AbnormalSecurity/.pack-ignore b/Packs/AbnormalSecurity/.pack-ignore index 413c7cdfb2ba..e40758e479b0 100644 --- a/Packs/AbnormalSecurity/.pack-ignore +++ b/Packs/AbnormalSecurity/.pack-ignore @@ -5,5 +5,5 @@ ignore=IM111 ignore=MR108 [file:AbnormalSecurityEventCollector.yml] -ignore=MR108 +ignore=MR108,IN124 diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.py b/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.py index 019c33864d0c..05fcc1c70772 100644 --- a/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.py +++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurity/AbnormalSecurity.py @@ -633,7 +633,7 @@ def submit_an_inquiry_to_request_a_report_on_misjudgement_by_abnormal_security_c def submit_false_negative_report_command(client, args): - recipient_email = str(args.get('recipient_email;', '')) + recipient_email = str(args.get('recipient_email', '')) sender_email = str(args.get('sender_email', '')) subject = str(args.get('subject', '')) response = client.submit_false_negative_report_request(recipient_email, sender_email, subject) @@ -646,7 +646,7 @@ def submit_false_negative_report_command(client, args): def submit_false_positive_report_command(client, args): - portal_link = str(args.get('portal_link;', '')) + portal_link = str(args.get('portal_link', '')) response = client.submit_false_positive_report_request(portal_link) command_results = CommandResults( readable_output=response, diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.py b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.py index e631d805de67..48e1a3236259 100644 --- 
a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.py +++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.py @@ -111,7 +111,7 @@ def main(): verify = params['verify'] proxy = params['proxy'] after = arg_to_datetime( - arg=params.get('after'), arg_name='after', required=True).strftime("%Y-%m-%dT%H:%M:%SZ") # type: ignore + arg='1 minute').strftime("%Y-%m-%dT%H:%M:%SZ") # type: ignore client = Client(base_url='https://api.abnormalplatform.com/v1', verify=verify, proxy=proxy, @@ -122,6 +122,7 @@ def main(): after = last_run command = demisto.command() + demisto.debug(f'Command being called is {command}') try: threats, last_run = get_events(client, after) if command == 'test-module': diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml index 5e2477a5f5a4..c737e1bfc55e 100644 --- a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml +++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/AbnormalSecurityEventCollector.yml @@ -17,6 +17,7 @@ configuration: type: 0 defaultvalue: 1 day required: false + hidden: true - display: Use system proxy settings name: proxy type: 8 @@ -40,7 +41,7 @@ script: - 'True' - 'False' required: true - dockerimage: demisto/python3:3.10.13.78960 + dockerimage: demisto/python3:3.10.14.92207 isfetchevents: true subtype: python3 fromversion: 6.8.0 diff --git a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md index 2b2b0ba8a835..71fcaad7433f 100644 --- a/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md +++ b/Packs/AbnormalSecurity/Integrations/AbnormalSecurityEventCollector/README.md @@ 
-1,6 +1,10 @@ Abnormal Security Event Collector integration for XSIAM. This integration was integrated and tested with version 01 of Abnormal Security Event Collector +<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure Abnormal Security Event Collector on Cortex XSIAM 1. Navigate to **Settings** > **Integrations** > **Servers & Services**. diff --git a/Packs/AbnormalSecurity/ReleaseNotes/2_2_8.md b/Packs/AbnormalSecurity/ReleaseNotes/2_2_8.md new file mode 100644 index 000000000000..3363fb5e783b --- /dev/null +++ b/Packs/AbnormalSecurity/ReleaseNotes/2_2_8.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Abnormal Security + +- Fixed an issue in the **abnormal-security-submit_false_positive_report** command where the *portal_link* argument did not work as expected. +- Fixed an issue in the **abnormal-security-submit_false_negative_report** command where the *recipient_email* argument did not work as expected. diff --git a/Packs/AbnormalSecurity/ReleaseNotes/2_2_9.md b/Packs/AbnormalSecurity/ReleaseNotes/2_2_9.md new file mode 100644 index 000000000000..bb5855e7e7fe --- /dev/null +++ b/Packs/AbnormalSecurity/ReleaseNotes/2_2_9.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Abnormal Security Event Collector + +- Changed the default value for *First fetch time interval* parameter to 1 minute and hid it. +- Updated the Docker image to: *demisto/python3:3.10.14.92207*. 
\ No newline at end of file diff --git a/Packs/AbnormalSecurity/pack_metadata.json b/Packs/AbnormalSecurity/pack_metadata.json index dc0ae32b4319..896241b4a068 100644 --- a/Packs/AbnormalSecurity/pack_metadata.json +++ b/Packs/AbnormalSecurity/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Abnormal Security", "description": "Abnormal Security detects and protects against the whole spectrum of email attacks", "support": "partner", - "currentVersion": "2.2.7", + "currentVersion": "2.2.9", "author": "Abnormal Security", "url": "", "email": "support@abnormalsecurity.com", @@ -23,5 +23,6 @@ "marketplaces": [ "xsoar", "marketplacev2" - ] + ], + "defaultDataSource": "Abnormal Security Event Collector" } \ No newline at end of file diff --git a/Packs/AccessInvestigation/ReleaseNotes/1_2_8.md b/Packs/AccessInvestigation/ReleaseNotes/1_2_8.md new file mode 100644 index 000000000000..4a0c5ba98e57 --- /dev/null +++ b/Packs/AccessInvestigation/ReleaseNotes/1_2_8.md @@ -0,0 +1,3 @@ +## Access Investigation + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. 
diff --git a/Packs/AccessInvestigation/pack_metadata.json b/Packs/AccessInvestigation/pack_metadata.json index ac7dbf354949..b03b5272a43a 100644 --- a/Packs/AccessInvestigation/pack_metadata.json +++ b/Packs/AccessInvestigation/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Access Investigation", "description": "This Content Pack automates response to unauthorised access incidents and contains customer access incident views and layouts to aid investigation.", "support": "xsoar", - "currentVersion": "1.2.7", + "currentVersion": "1.2.8", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Akamai_SIEM/ModelingRules/Akamai_WAF/Akamai_WAF.xif b/Packs/Akamai_SIEM/ModelingRules/Akamai_WAF/Akamai_WAF.xif index b2b1b93cb8b1..d1f0623532f8 100644 --- a/Packs/Akamai_SIEM/ModelingRules/Akamai_WAF/Akamai_WAF.xif +++ b/Packs/Akamai_SIEM/ModelingRules/Akamai_WAF/Akamai_WAF.xif @@ -14,7 +14,8 @@ alter // pre-modeling extractions response_code = httpMessage -> status, rule_actions = arraystring(json_extract_scalar_array(attackData, "$.ruleActions"), ";"), rule_messages = json_extract_scalar_array(attackData, "$.ruleMessages"), - rules = arraystring(json_extract_scalar_array(attackData, "$.rules"), ";") + rules = arraystring(json_extract_scalar_array(attackData, "$.rules"), ";"), + as_number = geo -> asn | alter // initialize the url components // set url scheme according to protocol which is an enum of either 80 or 443 @@ -47,7 +48,7 @@ alter // pre-modeling extractions xdm.observer.name = type, // Characterizes the source of this report data. Value is always "akamai_siem". xdm.source.application.name = clientData -> appBundleId, xdm.source.application.version = clientData -> appVersion, - xdm.source.asn.as_number = to_integer(geo -> asn), // The AS number or numbers that the IP belongs to. + xdm.source.asn.as_number = if(as_number ~= "\D|^\s*$", null, to_integer(as_number)), // The AS number or numbers that the IP belongs to. 
xdm.source.host.os_family = if(client_platform contains "WINDOWS", XDM_CONST.OS_FAMILY_WINDOWS, client_platform contains "MAC", XDM_CONST.OS_FAMILY_MACOS, client_platform contains "LINUX", XDM_CONST.OS_FAMILY_LINUX, client_platform contains "ANDROID", XDM_CONST.OS_FAMILY_ANDROID, client_platform contains "IOS", XDM_CONST.OS_FAMILY_IOS, client_platform contains "UBUNTU", XDM_CONST.OS_FAMILY_UBUNTU, client_platform contains "DEBIAN", XDM_CONST.OS_FAMILY_DEBIAN, client_platform contains "FEDORA", XDM_CONST.OS_FAMILY_FEDORA, client_platform contains "CENTOS", XDM_CONST.OS_FAMILY_CENTOS, client_platform contains "CHROME", XDM_CONST.OS_FAMILY_CHROMEOS, client_platform contains "SOLARIS", XDM_CONST.OS_FAMILY_SOLARIS, client_platform contains "SCADA", XDM_CONST.OS_FAMILY_SCADA, client_platform), xdm.source.ipv4 = if(client_ip ~= "(?:\d{1,3}\.){3}\d{1,3}", client_ip), // // The IPv4 address of the client making the request. xdm.source.ipv6 = if(client_ip ~= ":", client_ip), // // The IPv6 address of the client making the request. @@ -60,4 +61,4 @@ alter // pre-modeling extractions xdm.target.port = port, xdm.target.resource.id = attackData -> apiId, // For attacks on API services, this is a unique identifier under which the API is protected. It corresponds to the apiEndPointId value in the API Endpoint Definition API. xdm.target.sent_bytes = to_integer(httpMessage -> bytes), // The number of bytes served in the response. - xdm.target.url = url; \ No newline at end of file + xdm.target.url = url; diff --git a/Packs/Akamai_SIEM/ReleaseNotes/1_1_2.md b/Packs/Akamai_SIEM/ReleaseNotes/1_1_2.md new file mode 100644 index 000000000000..2d5fd7287146 --- /dev/null +++ b/Packs/Akamai_SIEM/ReleaseNotes/1_1_2.md @@ -0,0 +1,6 @@ + +#### Modeling Rules + +##### Akamai WAF Modeling Rule + +Updated the Modeling Rule logic for AS Numbers extraction. 
diff --git a/Packs/Akamai_SIEM/ReleaseNotes/1_1_3.md b/Packs/Akamai_SIEM/ReleaseNotes/1_1_3.md new file mode 100644 index 000000000000..f83f1b50be88 --- /dev/null +++ b/Packs/Akamai_SIEM/ReleaseNotes/1_1_3.md @@ -0,0 +1,6 @@ + +#### Modeling Rules + +##### Akamai WAF Modeling Rule + +Updated the Modeling Rule logic, ignoring empty string values for the xdm.source.asn.as_number field. diff --git a/Packs/Akamai_SIEM/pack_metadata.json b/Packs/Akamai_SIEM/pack_metadata.json index 721ef0661862..9eef956ff51f 100644 --- a/Packs/Akamai_SIEM/pack_metadata.json +++ b/Packs/Akamai_SIEM/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Akamai WAF SIEM", "description": "Use the Akamai WAF SIEM integration to retrieve security events from Akamai Web Application Firewall (WAF) service.", "support": "xsoar", - "currentVersion": "1.1.1", + "currentVersion": "1.1.3", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml index 73a414e837d2..95dffaf7edd7 100644 --- a/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml +++ b/Packs/Akamai_WAF/Integrations/Akamai_WAF/Akamai_WAF.yml @@ -1477,7 +1477,7 @@ script: - contextPath: Akamai.Enrollments.Change.Status description: Akamai enrollments change status. type: Dictionary - dockerimage: demisto/auth-utils:1.0.0.79304 + dockerimage: demisto/auth-utils:1.0.0.94075 script: '' subtype: python3 type: python diff --git a/Packs/Akamai_WAF/ReleaseNotes/2_0_10.md b/Packs/Akamai_WAF/ReleaseNotes/2_0_10.md new file mode 100644 index 000000000000..ac3e5a4dcbaa --- /dev/null +++ b/Packs/Akamai_WAF/ReleaseNotes/2_0_10.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Akamai WAF + +- Updated the Docker image to: *demisto/auth-utils:1.0.0.94075*. 
diff --git a/Packs/Akamai_WAF/pack_metadata.json b/Packs/Akamai_WAF/pack_metadata.json index 57e7d85fdf28..82b6ba313cea 100644 --- a/Packs/Akamai_WAF/pack_metadata.json +++ b/Packs/Akamai_WAF/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Akamai WAF", "description": "Use the Akamai WAF integration to manage common sets of lists used by various Akamai security products and features.", "support": "xsoar", - "currentVersion": "2.0.9", + "currentVersion": "2.0.10", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Algosec/Integrations/AlgoSec/README.md b/Packs/Algosec/Integrations/AlgoSec/README.md index e69de29bb2d1..881442347ccc 100644 --- a/Packs/Algosec/Integrations/AlgoSec/README.md +++ b/Packs/Algosec/Integrations/AlgoSec/README.md @@ -0,0 +1,129 @@ +Algosec BusinessFlow(ABF), Firewall Analyzer (AFA) and FireFlow(AFF). + +## Configure AlgoSec on XSOAR +--- + +1. Navigate to __Settings__ > __Integrations__ > __Servers & Services__. +2. Search for AlgoSec. +3. Click __Add instance__ to create and configure a new integration instance. + * __Name__: a textual name for the integration instance. + * __Server URL (e.g. https://192.168.0.1)__ + * __Credentials__ + * __Trust any certificate (not secure)__ + * __Use system proxy settings__ +4. Click __Test__ to validate the URLs, token, and connection. + +## Commands +--- +You can execute these commands from the XSOAR CLI, as part of an automation, or in a playbook. +After you successfully execute a command, a DBot message appears in the War Room with the command details. +1. algosec-get-ticket +2. algosec-create-ticket +3. algosec-get-applications +4. algosec-get-network-object +5. algosec-query +### 1. 
algosec-get-ticket +--- +Retrieves a FireFlow change request by its ID + +##### Base Command + +`algosec-get-ticket` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| ticketId | ID of requested change request | Required | + + +##### Context Output + +There is no context output for this command. + + +### 2. algosec-create-ticket +--- +Creates a new FireFlow change request +##### Base Command + +`algosec-create-ticket` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| description | A free text description of the issue | Optional | +| devices | A list of device names, on which the change should be made | Optional | +| action | The device action to perform for the traffic. This can be either
of the following: \U0010FC00 1 - Allow the traffic \U0010FC00 0 - Block the
traffic
| Required | +| destAddress | The destination address to perform the action on | Required | +| sourceAddress | The source address to perform the action on | Required | +| requestor | The email address of the requestor | Required | +| subject | The change request's title | Required | +| service | The device service or port for the connection, for example, "http" or Mandatory "tcp/123" | Required | +| user | The user for the connection | Required | +| application | The application for the connection | Required | + + +##### Context Output + +There is no context output for this command. + + +### 3. algosec-get-applications +--- +Find applications containing network objects related to IP address using BusinessFlow + +##### Base Command + +`algosec-get-applications` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| address | The IP/Subnet to search | Required | +| type | The search method for the address | Optional | + + +##### Context Output + +There is no context output for this command. + +### 4. algosec-get-network-object +--- +Find network objects related to IP address + +##### Base Command + +`algosec-get-network-object` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| address | The IP/Subnet to search | Required | +| type | The search method for the address (default is INTERSECT) | Optional | + + +##### Context Output + +There is no context output for this command. + +### 5. algosec-query +--- +Performs a batch traffic simulation query using Firewall Analyzer + +##### Base Command + +`algosec-query` +##### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| source | source(s) for the query. Multiple values are separated by commas (,) | Required | +| destination | destination(s) for the query. Multiple values are separated by commas (,) | Required | +| service | service(s) for the query. 
Multiple values are separated by commas (,) | Required | +| user | user for the query | Optional | +| application | application for the query | Optional | + + +##### Context Output + +There is no context output for this command. diff --git a/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml b/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml index 986af5d727b5..938854340f43 100644 --- a/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml +++ b/Packs/AlienVault_OTX/TestPlaybooks/playbook-Alienvault_OTX_v2-test.yml @@ -79,7 +79,7 @@ tasks: - "3" scriptarguments: ip: - simple: 8.8.8.8 + simple: 1.2.3.4 threshold: {} continueonerror: true separatecontext: false @@ -216,7 +216,7 @@ tasks: - "7" scriptarguments: domain: - simple: example.com + simple: paloaltonetworks.com threshold: {} continueonerror: true separatecontext: false @@ -894,7 +894,7 @@ tasks: - "27" scriptarguments: indicator: - simple: 8.8.8.8 + simple: 1.2.3.4 indicator_type: simple: IPv4 continueonerror: true @@ -1022,7 +1022,7 @@ tasks: - "31" scriptarguments: indicator: - simple: 8.8.8.8 + simple: 1.2.3.4 indicator-type: simple: IPv4 indicator_type: @@ -1543,7 +1543,7 @@ tasks: - "47" scriptarguments: indicator: - simple: 8.8.8.8 + simple: 1.2.3.4 indicator-type: simple: IPv4 indicator_type: diff --git a/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.py b/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.py index e63665c122bb..98ca9d09b4f5 100644 --- a/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.py +++ b/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.py @@ -31,6 +31,7 @@ HEADERS = { 'Content-Type': 'application/json' } +RETRY_COUNT = 2 IOC_ARGS_TO_INDICATOR_KEY_MAP = { 'domain': { @@ -192,12 +193,14 @@ def http_request(self, method, url_suffix, params=None, data=None, headers=None, files=None, 
json=None, + without_credentials=False, resp_type='json'): """ A wrapper for requests lib to send our requests and handle requests and responses better. """ params = params or {} - params.update(self.credentials) + if not without_credentials: + params.update(self.credentials) res = super()._http_request( method=method, url_suffix=url_suffix, @@ -208,6 +211,7 @@ def http_request(self, method, files=files, resp_type=resp_type, error_handler=self.error_handler, + retries=RETRY_COUNT, ) return res @@ -2522,7 +2526,7 @@ def get_indicators(client: Client, **kwargs): next_page = res.get('meta', {}).get('next', None) while len(iocs_context) < limit and next_page: next_page = next_page.replace('api/', '') - res = client.http_request("GET", next_page) + res = client.http_request("GET", next_page, without_credentials=True) iocs_list = res.get('objects', None) next_page = res.get('meta', {}).get('next', None) if iocs_list: diff --git a/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.yml b/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.yml index 7bf0a9794f18..7338f5a79dd4 100644 --- a/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.yml +++ b/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3.yml @@ -6431,7 +6431,7 @@ script: isArray: true description: Remove tags from the indicators. 
name: threatstream-remove-indicator-tag - dockerimage: demisto/py3-tools:1.0.0.87415 + dockerimage: demisto/py3-tools:1.0.0.93223 runonce: false script: '-' subtype: python3 diff --git a/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3_test.py b/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3_test.py index 639356b16aa0..26e7b7c46773 100644 --- a/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3_test.py +++ b/Packs/Anomali_ThreatStream/Integrations/AnomaliThreatStreamv3/AnomaliThreatStreamv3_test.py @@ -1864,3 +1864,23 @@ def test_remove_indicator_tag_command_success( # Verify result assert result.readable_output == expected_output + + +@pytest.mark.parametrize( + "without_credentials, expected_params", + [ + (False, {'username': '', 'api_key': ''}), + (True, {}), + ], +) +def test_http_request_without_credentials(mocker, without_credentials: bool, expected_params: dict): + """ + Given: Different boolean value for without_credentials argument of Client.http_request() + When: Calling http_request() + Then: Ensuring the credentials parameters are added if the value is True, and not added otherwise. 
+ """ + from AnomaliThreatStreamv3 import BaseClient + http_request = mocker.patch.object(BaseClient, "_http_request", return_value={}) + client: BaseClient = mock_client() + client.http_request("GET", "/hello", without_credentials=without_credentials) + assert http_request.call_args.kwargs["params"] == expected_params diff --git a/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_18.md b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_18.md new file mode 100644 index 000000000000..a0e2886fa0f2 --- /dev/null +++ b/Packs/Anomali_ThreatStream/ReleaseNotes/2_2_18.md @@ -0,0 +1,8 @@ + +#### Integrations + +##### Anomali ThreatStream v3 + +- Fixed an issue in the ***threatstream-get-indicators*** command where not all of the results were returned when the *limit* argument was very large. +- Added retries to the API calls to prevent connection errors. +- Updated the Docker image to: *demisto/py3-tools:1.0.0.93223*. diff --git a/Packs/Anomali_ThreatStream/pack_metadata.json b/Packs/Anomali_ThreatStream/pack_metadata.json index 4adaaebe95a1..62f095e9fc95 100644 --- a/Packs/Anomali_ThreatStream/pack_metadata.json +++ b/Packs/Anomali_ThreatStream/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Anomali ThreatStream", "description": "Use Anomali ThreatStream to query and submit threats.", "support": "xsoar", - "currentVersion": "2.2.17", + "currentVersion": "2.2.18", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AnsibleKubernetes/Integrations/AnsibleKubernetes/AnsibleKubernetes_description.md b/Packs/AnsibleKubernetes/Integrations/AnsibleKubernetes/AnsibleKubernetes_description.md index e9b80c5e0832..accdaeaf81c2 100644 --- a/Packs/AnsibleKubernetes/Integrations/AnsibleKubernetes/AnsibleKubernetes_description.md +++ b/Packs/AnsibleKubernetes/Integrations/AnsibleKubernetes/AnsibleKubernetes_description.md @@ -18,7 +18,21 @@ metadata: EOF ``` -2. Grant the service account an appropriate role. 
Refer to [Kubernetes RBAC docs](https://kubernetes.io/docs/reference/access-authn-authz/rbac/) if granting more fine grain or scoped access. +2. Create secret for the above service account. +``` +kubectl apply -f - < **Integrations** > **Servers & Services**. diff --git a/Packs/AnsibleKubernetes/ReleaseNotes/1_0_7.md b/Packs/AnsibleKubernetes/ReleaseNotes/1_0_7.md new file mode 100644 index 000000000000..57025d56885c --- /dev/null +++ b/Packs/AnsibleKubernetes/ReleaseNotes/1_0_7.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Ansible Kubernetes + +Updated the help with instructions for getting an API Token following the changes in Kubernetes 1.24 that Secrets are not automatically generated when Service Account are created. \ No newline at end of file diff --git a/Packs/AnsibleKubernetes/pack_metadata.json b/Packs/AnsibleKubernetes/pack_metadata.json index 3ac442cca831..f9d7213b4510 100644 --- a/Packs/AnsibleKubernetes/pack_metadata.json +++ b/Packs/AnsibleKubernetes/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Ansible Kubernetes", "description": "Manage and control Kubernetes clusters.", "support": "xsoar", - "currentVersion": "1.0.6", + "currentVersion": "1.0.7", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/ApiModules/ReleaseNotes/2_2_24.md b/Packs/ApiModules/ReleaseNotes/2_2_24.md new file mode 100644 index 000000000000..24cc30ea757e --- /dev/null +++ b/Packs/ApiModules/ReleaseNotes/2_2_24.md @@ -0,0 +1,5 @@ +#### Scripts + +##### CoreIRApiModule + +Fixed an issue where running the ***get-incidents*** command with `starred="false"` returned starred incidents. 
\ No newline at end of file diff --git a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py index 74cee5b940ad..219d0d58c0d6 100644 --- a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py +++ b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule.py @@ -209,7 +209,7 @@ def get_incidents(self, incident_id_list=None, lte_modification_time=None, gte_m 'value': status }) - if starred and starred_incidents_fetch_window: + if starred and starred_incidents_fetch_window and demisto.command() == 'fetch-incidents': filters.append({ 'field': 'starred', 'operator': 'eq', @@ -220,47 +220,60 @@ def get_incidents(self, incident_id_list=None, lte_modification_time=None, gte_m 'operator': 'gte', 'value': starred_incidents_fetch_window }) - if demisto.command() == 'fetch-incidents': - if len(filters) > 0: - request_data['filters'] = filters - incidents = self.handle_fetch_starred_incidents(limit, page_number, request_data) - return incidents - else: - if lte_creation_time: - filters.append({ - 'field': 'creation_time', - 'operator': 'lte', - 'value': date_to_timestamp(lte_creation_time, TIME_FORMAT) - }) + if len(filters) > 0: + request_data['filters'] = filters + incidents = self.handle_fetch_starred_incidents(limit, page_number, request_data) + return incidents - if gte_creation_time: - filters.append({ - 'field': 'creation_time', - 'operator': 'gte', - 'value': date_to_timestamp(gte_creation_time, TIME_FORMAT) - }) + if starred is not None and demisto.command() != 'fetch-incidents': + filters.append({ + 'field': 'starred', + 'operator': 'eq', + 'value': starred + }) - if lte_modification_time: - filters.append({ - 'field': 'modification_time', - 'operator': 'lte', - 'value': date_to_timestamp(lte_modification_time, TIME_FORMAT) - }) + if lte_creation_time: + filters.append({ + 'field': 'creation_time', + 'operator': 'lte', + 'value': date_to_timestamp(lte_creation_time, TIME_FORMAT) + }) - if 
gte_modification_time: - filters.append({ - 'field': 'modification_time', - 'operator': 'gte', - 'value': date_to_timestamp(gte_modification_time, TIME_FORMAT) - }) + if gte_creation_time: + filters.append({ + 'field': 'creation_time', + 'operator': 'gte', + 'value': date_to_timestamp(gte_creation_time, TIME_FORMAT) + }) + elif starred and starred_incidents_fetch_window and demisto.command() != 'fetch-incidents': + # backwards compatibility of starred_incidents_fetch_window + filters.append({ + 'field': 'creation_time', + 'operator': 'gte', + 'value': starred_incidents_fetch_window + }) - if gte_creation_time_milliseconds > 0: - filters.append({ - 'field': 'creation_time', - 'operator': 'gte', - 'value': gte_creation_time_milliseconds - }) + if lte_modification_time: + filters.append({ + 'field': 'modification_time', + 'operator': 'lte', + 'value': date_to_timestamp(lte_modification_time, TIME_FORMAT) + }) + + if gte_modification_time: + filters.append({ + 'field': 'modification_time', + 'operator': 'gte', + 'value': date_to_timestamp(gte_modification_time, TIME_FORMAT) + }) + + if gte_creation_time_milliseconds > 0: + filters.append({ + 'field': 'creation_time', + 'operator': 'gte', + 'value': gte_creation_time_milliseconds + }) if len(filters) > 0: request_data['filters'] = filters @@ -646,7 +659,7 @@ def blocklist_files(self, hash_list, comment=None, incident_id=None, detailed_re method='POST', url_suffix='/hash_exceptions/blocklist/', json_data={'request_data': request_data}, - ok_codes=(200, 201, 500,), + ok_codes=(200, 201, 500), timeout=self.timeout ) return reply.get('reply') @@ -663,9 +676,13 @@ def remove_blocklist_files(self, hash_list, comment=None, incident_id=None): method='POST', url_suffix='/hash_exceptions/blocklist/remove/', json_data={'request_data': request_data}, + ok_codes=(200, 201, 500), timeout=self.timeout ) - return reply.get('reply') + res = reply.get('reply') + if isinstance(res, dict) and res.get('err_code') == 500: + raise 
DemistoException(f"{res.get('err_msg')}\nThe requested hash might not be in the blocklist.") + return res def allowlist_files(self, hash_list, comment=None, incident_id=None, detailed_response=False): request_data: Dict[str, Any] = {"hash_list": hash_list} @@ -2299,8 +2316,15 @@ def restore_file_command(client, args): ) +def validate_sha256_hashes(hash_list): + for hash_value in hash_list: + if detect_file_indicator_type(hash_value) != 'sha256': + raise DemistoException(f'The provided hash {hash_value} is not a valid sha256.') + + def blocklist_files_command(client, args): hash_list = argToList(args.get('hash_list')) + validate_sha256_hashes(hash_list) comment = args.get('comment') incident_id = arg_to_number(args.get('incident_id')) detailed_response = argToBoolean(args.get('detailed_response', False)) @@ -2333,6 +2357,7 @@ def blocklist_files_command(client, args): def remove_blocklist_files_command(client: CoreClient, args: Dict) -> CommandResults: hash_list = argToList(args.get('hash_list')) + validate_sha256_hashes(hash_list) comment = args.get('comment') incident_id = arg_to_number(args.get('incident_id')) @@ -4172,7 +4197,7 @@ def get_incidents_command(client, args): statuses = argToList(args.get('status', '')) - starred = args.get('starred') + starred = argToBoolean(args.get('starred')) if args.get('starred', None) not in ('', None) else None starred_incidents_fetch_window = args.get('starred_incidents_fetch_window', '3 days') starred_incidents_fetch_window, _ = parse_date_range(starred_incidents_fetch_window, to_timestamp=True) diff --git a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py index fd355bc78375..6da6314511e2 100644 --- a/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py +++ b/Packs/ApiModules/Scripts/CoreIRApiModule/CoreIRApiModule_test.py @@ -7,6 +7,7 @@ from pytest_mock import MockerFixture import pytest +import demistomock import demistomock as 
demisto from CommonServerPython import Common, tableToMarkdown, pascalToSpace, DemistoException from CoreIRApiModule import CoreClient, handle_outgoing_issue_closure, XSOAR_RESOLVED_STATUS_TO_XDR @@ -3788,15 +3789,83 @@ def test_get_incident_list_by_status(self, mocker): } assert expected_output == outputs - def test_get_starred_incident_list(self, requests_mock): + @freeze_time("2024-01-15 17:00:00 UTC") + @pytest.mark.parametrize('starred, expected_starred', + [(True, True), + (False, False), + ('true', True), + ('false', False), + (None, None), + ('', None)]) + def test_get_starred_incident_list_from_get(self, mocker, requests_mock, starred, expected_starred): """ Given: A query with starred parameters. When: Running get_incidents_command. - Then: Ensure the starred output is returned. + Then: Ensure the starred output is returned and the request filters are set correctly. """ get_incidents_list_response = load_test_data('./test_data/get_starred_incidents_list.json') - requests_mock.post(f'{Core_URL}/public_api/v1/incidents/get_incidents/', json=get_incidents_list_response) + get_incidents_request = requests_mock.post(f'{Core_URL}/public_api/v1/incidents/get_incidents/', + json=get_incidents_list_response) + mocker.patch.object(demisto, 'command', return_value='get-incidents') + + client = CoreClient( + base_url=f'{Core_URL}/public_api/v1', headers={} + ) + + args = { + 'incident_id_list': '1 day', + 'starred': starred, + 'starred_incidents_fetch_window': '3 days' + } + + starred_filter_true = { + 'field': 'starred', + 'operator': 'eq', + 'value': True + } + + starred_filter_false = { + 'field': 'starred', + 'operator': 'eq', + 'value': False + } + + starred_fetch_window_filter = { + 'field': 'creation_time', + 'operator': 'gte', + 'value': 1705078800000 + } + + _, outputs, _ = get_incidents_command(client, args) + + request_filters = get_incidents_request.last_request.json()['request_data']['filters'] + assert 
len(outputs['CoreApiModule.Incident(val.incident_id==obj.incident_id)']) >= 1 + if expected_starred: + assert starred_filter_true in request_filters + assert starred_fetch_window_filter in request_filters + assert outputs['CoreApiModule.Incident(val.incident_id==obj.incident_id)'][0]['starred'] is True + elif expected_starred is False: + assert starred_filter_false in request_filters + assert starred_fetch_window_filter not in request_filters + else: # expected_starred is None + assert starred_filter_true not in request_filters + assert starred_filter_false not in request_filters + assert starred_fetch_window_filter not in request_filters + + @freeze_time("2024-01-15 17:00:00 UTC") + @pytest.mark.parametrize('starred', [False, "False", 'false', None, '']) + def test_get_starred_false_incident_list_from_fetch(self, mocker, requests_mock, starred): + """ + Given: A query with starred=false parameter. + When: Running get_incidents_command from fetch-incidents. + Then: Ensure the request doesn't filter on starred incidents. 
+ """ + + get_incidents_list_response = load_test_data('./test_data/get_starred_incidents_list.json') + mocker.patch.object(demisto, 'command', return_value='fetch-incidents') + get_incidents_request = requests_mock.post(f'{Core_URL}/public_api/v1/incidents/get_incidents/', + json=get_incidents_list_response) client = CoreClient( base_url=f'{Core_URL}/public_api/v1', headers={} @@ -3804,11 +3873,80 @@ def test_get_starred_incident_list(self, requests_mock): args = { 'incident_id_list': '1 day', - 'starred': True, + 'starred': starred, 'starred_incidents_fetch_window': '3 days' } + + starred_filter_true = { + 'field': 'starred', + 'operator': 'eq', + 'value': True + } + + starred_filter_false = { + 'field': 'starred', + 'operator': 'eq', + 'value': False + } + + starred_fetch_window_filter = { + 'field': 'creation_time', + 'operator': 'gte', + 'value': 1705078800000 + } + + _, outputs, _ = get_incidents_command(client, args) + + request_filters = get_incidents_request.last_request.json()['request_data']['filters'] + assert len(outputs['CoreApiModule.Incident(val.incident_id==obj.incident_id)']) >= 1 + assert starred_filter_true not in request_filters + assert starred_filter_false not in request_filters + assert starred_fetch_window_filter not in request_filters + + @freeze_time("2024-01-15 17:00:00 UTC") + @pytest.mark.parametrize('starred', [True, 'true', "True"]) + def test_get_starred_true_incident_list_from_fetch(self, mocker, starred): + """ + Given: A query with starred=true parameter. + When: Running get_incidents_command from fetch-incidents. + Then: Ensure the request filters on starred incidents and contains the starred_fetch_window_filter filter. 
+ """ + + get_incidents_list_response = load_test_data('./test_data/get_starred_incidents_list.json') + mocker.patch.object(demisto, 'command', return_value='fetch-incidents') + handle_fetch_starred_mock = mocker.patch.object(CoreClient, + 'handle_fetch_starred_incidents', + return_value=get_incidents_list_response["reply"]['incidents']) + + client = CoreClient( + base_url=f'{Core_URL}/public_api/v1', headers={} + ) + + args = { + 'incident_id_list': '1 day', + 'starred': starred, + 'starred_incidents_fetch_window': '3 days' + } + + starred_filter_true = { + 'field': 'starred', + 'operator': 'eq', + 'value': True + } + + starred_fetch_window_filter = { + 'field': 'creation_time', + 'operator': 'gte', + 'value': 1705078800000 + } + _, outputs, _ = get_incidents_command(client, args) + handle_fetch_starred_mock.assert_called() + request_filters = handle_fetch_starred_mock.call_args.args[2]['filters'] + assert len(outputs['CoreApiModule.Incident(val.incident_id==obj.incident_id)']) >= 1 + assert starred_filter_true in request_filters + assert starred_fetch_window_filter in request_filters assert outputs['CoreApiModule.Incident(val.incident_id==obj.incident_id)'][0]['starred'] is True diff --git a/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py b/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py index de1a1a92ed1b..c25b8fb1d574 100644 --- a/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py +++ b/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.py @@ -1,478 +1,32 @@ -from __future__ import print_function -import json -import uuid -import sys +import demistomock as demisto # noqa: F401 -SERVER_ERROR_MARKER = '[ERROR-fd5a7750-7182-4b38-90ba-091824478903]' +# Only the following integrations and scripts are using the Demisto class wrapper +DEMISTO_WRAPPER_INTEGRATIONS = ['Cortex XDR - IR', 'QRadar v3', 'SlackV3', 'ServiceNow v2'] +DEMISTO_WRAPPER_SCRIPTS = ['UnzipFile', 
'DBotFindSimilarIncidents', 'ParseCSV'] -class Demisto: - """Wrapper class to interface with the Demisto server via stdin, stdout""" - INTEGRATION = 'Integration' - SCRIPT = 'Script' - - def __init__(self, context): - self.callingContext = context - self.is_integration = self.callingContext['integration'] - self.item_type = self.INTEGRATION if self.is_integration else self.SCRIPT - self.is_debug = False - self._args = dict(self.callingContext.get(u'args', {})) - if 'demisto_machine_learning_magic_key' in self._args: - import os - os.environ['DEMISTO_MACHINE_LEARNING_MAGIC_KEY'] = self._args['demisto_machine_learning_magic_key'] - is_debug = self.callingContext.get('context', {}).get('IsDebug', False) - if is_debug: - self.is_debug = True - self._args.pop('debug-mode', '') - self.__stdout_lock = None - self._stdout_lock_timeout = 60 - self._heartbeat_enabled = False - if context.get('command') == 'long-running-execution' and context.get('is_running_heartbeat'): - self.long_running_heartbeat_thread() - - def raise_exception_if_not_implemented(self, implemented_item_type, function_name): - """ - - :param implemented_item_type: Integration or Script, type that te function works with - :param function_name: The calling function name - - :return: - """ - if self.item_type != implemented_item_type: - raise Exception('Demisto object has no function `{function_name}` for {item_type}.'.format( - function_name=function_name, item_type=self.item_type)) - - def enable_multithreading(self): - from threading import Lock - if self.__stdout_lock is None: - self.__stdout_lock = Lock() - - def long_running_heartbeat_thread(self, enable=True): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'long_running_heartbeat_thread') - if self._heartbeat_enabled == enable: - # nothing to do as state hasn't changed - return - self._heartbeat_enabled = enable - if self._heartbeat_enabled: - self.info("starting heartbeat thread") - self.enable_multithreading() - - def send_heartbeat(): 
- import time - counter = 0 - while True: - time.sleep(self.callingContext.get('heartbeat_interval', 30)) - if not self._heartbeat_enabled: - self.info("heartbeat disabled. Existing heartbeat thread.") - return - self.heartbeat("heartbeat counter: " + str(counter)) - counter += 1 - - import threading - self._heartbeat_thread = threading.Thread(target=send_heartbeat) - self._heartbeat_thread.setDaemon(True) - self._heartbeat_thread.start() - - def log(self, msg): - if self.is_integration: - self.__do_no_res({'type': 'entryLog', 'args': {'message': 'Integration log: ' + msg}}) - else: - self.__do_no_res({'type': 'entryLog', 'args': {'message': msg}}) - - def investigation(self): - return self.callingContext[u'context'][u'Inv'] - - def incidents(self, incidents=None): - if self.is_integration: - self.results({'Type': 1, 'Contents': json.dumps(incidents), 'ContentsFormat': 'json'}) - else: - return self.callingContext[u'context'][u'Incidents'] - - def incident(self): - if self.is_integration(): - return self.get_incidents()[0] - else: - return self.incidents()[0] - - def alerts(self): - self.raise_exception_if_not_implemented(self.SCRIPT, 'alerts') - return self.incidents() - - def get_incidents(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'get_incidents') - return self.callingContext[u'context'][u'Incidents'] - - def get_alerts(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'get_alerts') - return self.get_incidents() - - def alert(self): - return self.incident() - - def parentEntry(self): - return self.callingContext[u'context'][u'ParentEntry'] - - def context(self): - return self.callingContext[u'context'][u'ExecutionContext'] - - def integrationInstance(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'integrationInstance') - return self.callingContext[u'context'][u'IntegrationInstance'] - - def args(self): - return self._args - - def uniqueFile(self): - return str(uuid.uuid4()) - - def getFilePath(self, 
id): - return self.__do({'type': 'getFileByEntryID', 'command': 'getFilePath', 'args': {'id': id}}) - - def getLastRun(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'getLastRun') - return self.__do({'type': 'executeCommand', 'command': 'getLastRun', 'args': {}}) - - def setLastRun(self, value): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'setLastRun') - return self.__do({'type': 'executeCommand', 'command': 'setLastRun', 'args': {'value': value}}) - - def getLastMirrorRun(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'getLastMirrorRun') - return self.__do({'type': 'executeCommand', 'command': 'getLastMirrorRun', 'args': {}}) - - def setLastMirrorRun(self, value): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'setLastMirrorRun') - return self.__do({'type': 'executeCommand', 'command': 'setLastMirrorRun', 'args': {'value': value}}) - - def internalHttpRequest(self, method, uri, body=None): - return self.__do({'type': 'executeCommand', 'command': 'internalHttpRequest', - 'args': {'method': method, 'uri': uri, 'body': body}}) - - def getIntegrationContext(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'getIntegrationContext') - resObj = self.__do({'type': 'executeCommand', 'command': 'getIntegrationContext', 'args': {'refresh': False}}) - return resObj['context'] - - def setIntegrationContext(self, value): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'setIntegrationContext') - return self.__do({'type': 'executeCommand', 'command': 'setIntegrationContext', - 'args': {'value': value, 'version': {"version": -1, "sequenceNumber": -1, "primaryTerm": -1}, - 'sync': False}}) - - def getIntegrationContextVersioned(self, refresh=False): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'getIntegrationContextVersioned') - return self.__do({'type': 'executeCommand', 'command': 'getIntegrationContext', 'args': {'refresh': refresh}}) - - def 
setIntegrationContextVersioned(self, value, version, sync=False): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'setIntegrationContextVersioned') - return self.__do({'type': 'executeCommand', 'command': 'setIntegrationContext', - 'args': {'value': value, 'version': version, 'sync': sync}}) - - def searchRelationships(self, filter=None): - return self.__do({'type': 'executeCommand', 'command': 'searchRelationships', 'args': {'filter': filter}}) - - def getLicenseID(self): - return self.__do({'type': 'executeCommand', 'command': 'getLicenseID', 'args': {}})['id'] - - def createIncidents(self, incidents, lastRun=None, userID=None): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'createIncidents') - return self.__do({'type': 'executeCommand', 'command': 'createIncidents', - 'args': {'incidents': incidents, 'lastRun': lastRun, 'userID': userID}}) - - def createAlerts(self, alerts, lastRun=None, userID=None): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'createAlerts') - return self.__do({'type': 'executeCommand', 'command': 'createAlerts', - 'args': {'alerts': alerts, 'lastRun': lastRun, 'userID': userID}}) - - def createIndicators(self, indicators, lastRun=None, noUpdate=False): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'createIndicators') - return self.__do({'type': 'executeCommand', 'command': 'createIndicators', - 'args': {'indicators': indicators, 'lastRun': lastRun, 'noUpdate': noUpdate}}) - - def searchIndicators(self, value=None, query=None, size=None, page=None, sort=None, fromDate=None, toDate=None, - searchAfter=None, populateFields=None): - return self.__do({'type': 'executeCommand', 'command': 'searchIndicators', - 'args': {'value': value, 'query': query, 'size': size, 'page': page, 'sort': sort, - 'fromDate': fromDate, 'searchAfter': searchAfter, 'toDate': toDate, - 'populateFields': populateFields}}) - - def getIndexHash(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 
'getIndexHash') - return self.__do({'type': 'executeCommand', 'command': 'getIndexHash'}) - - def updateModuleHealth(self, err, is_error=False): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'updateModuleHealth') - return self.__do( - {'type': 'executeCommand', 'command': 'updateModuleHealth', 'args': {'err': err, 'isError': is_error}}) - - def addEntry(self, id, entry, username=None, email=None, footer=None): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'addEntry') - return self.__do({'type': 'executeCommand', 'command': 'addEntry', 'args': {'id': id, 'username': username, - 'email': email, 'entry': entry, - 'footer': footer}}) - - def directMessage(self, message, username=None, email=None, anyoneCanOpenIncidents=None): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'directMessage') - tmp = self.__do({'type': 'executeCommand', 'command': 'directMessage', - 'args': {'message': message, - 'username': username, - 'email': email, - 'anyoneCanOpenIncidents': anyoneCanOpenIncidents, - 'anyoneCanOpenAlerts': anyoneCanOpenIncidents}}) - if tmp is not None: - return tmp["res"] - - def mirrorInvestigation(self, id, mirrorType, autoClose=False): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'mirrorInvestigation') - return self.__do({'type': 'executeCommand', 'command': 'mirrorInvestigation', 'args': {'id': id, - 'mirrorType': mirrorType, - 'autoClose': autoClose}}) - - def findUser(self, username="", email=""): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'findUser') - return self.__do( - {'type': 'executeCommand', 'command': 'findUser', 'args': {'username': username, 'email': email}}) - - def handleEntitlementForUser(self, incidentID, guid, email, content, taskID=""): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'handleEntitlementForUser') - return self.__do({'type': 'executeCommand', 'command': 'handleEntitlementForUser', - 'args': {'incidentID': incidentID, 'alertID': incidentID, - 
'guid': guid, 'taskID': taskID, 'email': email, 'content': content}}) - - def mapObject(self, sourceObject, mapper, mapperType=""): - return self.__do({'type': 'executeCommand', 'command': 'mapObject', - 'args': {'source': sourceObject, 'mapper': mapper, 'mapperType': mapperType}}) - - def getAutoFocusApiKey(self): - resObj = self.__do({'type': 'executeCommand', 'command': 'getLicenseCustomField', 'args': {'key': 'autofocus'}}) - if resObj is not None: - return resObj['value'] - - def getLicenseCustomField(self, key): - resObj = self.__do({'type': 'executeCommand', 'command': 'getLicenseCustomField', 'args': {'key': key}}) - if resObj is not None: - return resObj['value'] - - def _apiCall(self, name, params=None, data=None): - self.raise_exception_if_not_implemented(self.INTEGRATION, '_apiCall') - return self.__do( - {'type': 'executeCommand', 'command': '_apiCall', 'args': {'name': name, 'params': params, 'data': data}}) - - def params(self): - if self.is_integration: - return self.callingContext.get(u'params', {}) - else: - return {} - - def command(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'command') - return self.callingContext.get(u'command', '') - - def isFetch(self): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'isFetch') - """ used to encapsulate/hide 'fetch-incident' command from the code """ - return self.command() == 'fetch-incidents' - - def get(self, obj, field, defaultParam=None): - """ Get the field from the given dict using dot notation """ - parts = field.split('.') - for part in parts: - if obj and part in obj: - obj = obj[part] - else: - return defaultParam - return obj - - def gets(self, obj, field): - return str(self.get(obj, field)) - - def getArg(self, arg, defaultParam=None): - return self.get(self.callingContext, 'args.' 
+ arg, defaultParam) - - def execute(self, module, command, args): - self.raise_exception_if_not_implemented(self.SCRIPT, 'execute') - return self.__do({'type': 'execute', 'module': module, 'command': command.strip(), 'args': args}) - - def executeCommand(self, command, args): - self.raise_exception_if_not_implemented(self.SCRIPT, 'executeCommand') - return self.__do({'type': 'executeCommand', 'command': command.strip(), 'args': args}) - - def demistoUrls(self): - return self.__do({'type': 'demistoUrls'}) - - def demistoVersion(self): - return self.__do({'type': 'demistoVersion'}) - - def heartbeat(self, msg): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'heartbeat') - return self.__do_no_res({'type': 'executeCommand', 'command': 'heartbeat', 'args': {'message': msg}}) - - def info(self, *args): - argsObj = {} - argsObj["args"] = list(args) - self.__do({'type': 'log', 'command': 'info', 'args': argsObj}) - - def error(self, *args): - argsObj = {} - argsObj["args"] = list(args) - self.__do({'type': 'log', 'command': 'error', 'args': argsObj}) - - def exception(self, ex): - self.raise_exception_if_not_implemented(self.SCRIPT, 'exception') - return self.__do({'type': 'exception', 'command': 'exception', 'args': ex}) - - def debug(self, *args): - argsObj = {} - argsObj["args"] = list(args) - self.__do({'type': 'log', 'command': 'debug', 'args': argsObj}) - - def getAllSupportedCommands(self): - self.raise_exception_if_not_implemented(self.SCRIPT, 'getAllSupportedCommands') - return self.__do({'type': 'getAllModulesSupportedCmds'}) - - def getModules(self): - self.raise_exception_if_not_implemented(self.SCRIPT, 'getModules') - return self.__do({'type': 'getAllModules'}) - - def setContext(self, name, value): - self.raise_exception_if_not_implemented(self.SCRIPT, 'setContext') - return self.__do({'type': 'setContext', 'name': name, 'value': value}) - - def getParam(self, p): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'getParam') - return 
self.get(self.callingContext, 'params.' + p) - - def dt(self, data, q): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'dt') - return self.__do({'type': 'dt', 'name': q, 'value': data})['result'] - - def __do_lock(self, lock, timeout): - if sys.version_info.major >= 3: - return lock.acquire(timeout=timeout) - else: - # python 2 doesn't have timeout we use polling - if timeout < 0: - return lock.acquire() - start = time.time() # type:ignore [name-defined] # noqa: F821 # pylint: disable=E0602 - while (time.time() - start) < timeout: # type:ignore [name-defined] # noqa: F821 # pylint: disable=E0602 - if lock.acquire(False): - return True - time.sleep(0.1) # type:ignore [name-defined] # noqa: F821 # pylint: disable=E0602 - # didn't get the lock - return False - - def __do_no_res(self, cmd): - lock = self.__stdout_lock - if lock is not None: - if not self.__do_lock(lock, self._stdout_lock_timeout): - raise RuntimeError('Timeout acquiring stdout lock') - try: - json.dump(cmd, sys.stdout) - sys.stdout.write('\n') - sys.stdout.flush() - finally: - if lock is not None: - lock.release() - - def __do(self, cmd): - lock = self.__stdout_lock - if lock is not None: - if not self.__do_lock(lock, self._stdout_lock_timeout): - raise RuntimeError('Timeout acquiring stdout lock') - try: - # Watch out, there is a duplicate copy of this method - json.dump(cmd, sys.stdout) - sys.stdout.write('\n') - sys.stdout.flush() - data = globals()['__readWhileAvailable']() - error_index = data.find(SERVER_ERROR_MARKER) - if error_index > -1: - offset = error_index + len(SERVER_ERROR_MARKER) - raise ValueError(data[offset:]) - return json.loads(data) - finally: - if lock is not None: - lock.release() - - def convert(self, results): - """ Convert whatever result into entry """ - self.raise_exception_if_not_implemented(self.SCRIPT, 'convert') - if type(results) is dict: - if 'Contents' in results and 'ContentsFormat' in results: - return results - else: - return {'Type': 1, 'Contents': 
json.dumps(results), 'ContentsFormat': 'json'} - if type(results) is list: - res = [] - for r in results: - res.append(self.convert(r)) - return res - if sys.version_info.major >= 3 and type(results) is bytes: - return {'Type': 1, 'Contents': results.decode('utf-8'), 'ContentsFormat': 'text'} - return {'Type': 1, 'Contents': str(results), 'ContentsFormat': 'text'} - - def __convert(self, results): - """ Convert whatever result into entry """ - self.raise_exception_if_not_implemented(self.INTEGRATION, '__convert') - if type(results) is dict: - if 'Contents' in results and 'ContentsFormat' in results: - return results - else: - return {'Type': 1, 'Contents': json.dumps(results), 'ContentsFormat': 'json'} - if type(results) is list: - res = [] - for r in results: - res.append(self.__convert(r)) - return res - if sys.version_info.major >= 3 and type(results) is bytes: - return {'Type': 1, 'Contents': results.decode('utf-8'), 'ContentsFormat': 'text'} - return {'Type': 1, 'Contents': str(results), 'ContentsFormat': 'text'} - - def results(self, results): - res = [] - if self.is_integration: - converted = self.__convert(results) - else: - converted = self.convert(results) - if type(converted) is list: - res = converted - else: - res.append(converted) - - self.__do_no_res({'type': 'result', 'results': res}) - - def fetchResults(self, incidents_or_alerts): - """ used to encapsulate/hide 'incidents' from the code """ - self.raise_exception_if_not_implemented(self.INTEGRATION, 'fetchResults') - self.incidents(incidents_or_alerts) - - def credentials(self, credentials): - self.raise_exception_if_not_implemented(self.INTEGRATION, 'credentials') - self.results({'Type': 1, 'Contents': json.dumps(credentials), 'ContentsFormat': 'json'}) - - -if "demisto" not in locals(): - try: - # try except for CommonServerPython tests. 
- demisto = Demisto(context) # type:ignore [name-defined] # noqa: F821 # pylint: disable=E0602 - except NameError: - pass - try: - import __builtin__ - from StringIO import StringIO -except ImportError: - # Python 3 - import builtins as __builtin__ # type:ignore[no-redef] - from io import StringIO - - -def demisto_print(*args): - global demisto - output = StringIO() - __builtin__.print(*args, file=output) - result = output.getvalue().strip() - demisto.log(result) # pylint: disable=E9012 - - -print = demisto_print + class DemistoWrapper(Demisto): # type:ignore [name-defined] # noqa: F821 # pylint: disable=E0602 + """A content-side wrapper to the builtin Demisto class. + All methods of this class can be executed in both scripts and integrations + (E.g., self.results). """ + + class DemistoScript(DemistoWrapper): + def getFilePath(self, id): + self.debug("Getting path of file entry with ID {}".format(id)) + return super(DemistoScript, self).getFilePath(id) + + class DemistoIntegration(DemistoWrapper): + def incidents(self, incidents): + if isinstance(incidents, list): + self.debug("[fetch-incidents] Creating {} incidents".format(len(incidents))) + super(DemistoIntegration, self).incidents(incidents) + + if demisto.callingContext.get('context', {}).get('IntegrationBrand', '') in DEMISTO_WRAPPER_INTEGRATIONS: + demisto.__class__ = DemistoIntegration + elif demisto.callingContext.get('context', {}).get('ScriptName', '') in DEMISTO_WRAPPER_SCRIPTS: + demisto.__class__ = DemistoScript + +except NameError: + # NameError will be raised only in tests, where a Demisto class isn't defined. 
+ pass diff --git a/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.yml b/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.yml index 648eca8fff54..49c36e59bb6e 100644 --- a/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.yml +++ b/Packs/ApiModules/Scripts/DemistoClassApiModule/DemistoClassApiModule.yml @@ -13,7 +13,7 @@ system: true scripttarget: 0 dependson: {} timeout: 0s -dockerimage: demisto/python3:3.10.1.25933 +dockerimage: demisto/python3:3.10.14.92207 fromversion: 6.8.0 tests: - No test diff --git a/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule.py b/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule.py new file mode 100644 index 000000000000..f30cccaafb31 --- /dev/null +++ b/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule.py @@ -0,0 +1,770 @@ +import ipaddress +import tldextract +import urllib.parse +from CommonServerPython import * +from re import Match + + +class URLError(Exception): + pass + + +class URLType: + """ + A class to represent an url and its parts + """ + + def __init__(self, raw_url: str): + self.raw = raw_url + self.scheme = '' + self.user_info = '' + self.hostname = '' + self.port = '' + self.path = '' + self.query = '' + self.fragment = '' + + def __str__(self): + return ( + f'Scheme = {self.scheme}\nUser_info = {self.user_info}\nHostname = {self.hostname}\nPort = {self.port}\n' + f'Path = {self.path}\nQuery = {self.query}\nFragment = {self.fragment}') + + +class URLCheck: + """ + This class will build and validate a URL based on "URL Living Standard" (https://url.spec.whatwg.org) + """ + sub_delims = ("!", "$", "&", "'", "(", ")", "*", "+", ",", ";", "=") + brackets = ("\"", "'", "[", "]", "{", "}", "(", ")") + + bracket_pairs = { + '{': '}', + '(': ')', + '[': ']', + '"': '"', + '\'': '\'', + } + + no_fetch_extract = tldextract.TLDExtract(suffix_list_urls=(), cache_dir=None) + + def __init__(self, original_url: str): + """ + Args: + 
original_url: The original URL input + + Attributes: + self.modified_url: The URL while being parsed by the formatter char by char + self.original_url: The original URL as it was inputted + self.url - The parsed URL and its parts (as a URLType object - see above) + self.base: A pointer to the first char of the section being checked and validated + self.output: The final URL output by the formatter + self.inside_brackets = A flag to indicate the parser index is within brackets + self.port = A flag to state that a port is found in the URL + self.query = A flag to state that a query is found in the URL + self.fragment = A flag to state that a fragment is found in the URL + self.done = A flag to state that the parser is done and no more parsing is needed + """ + + self.modified_url = original_url + self.original_url = original_url + self.url = URLType(original_url) + self.base = 0 # This attribute increases as the url is being parsed + self.output = '' + + self.inside_brackets = False + self.opening_bracket = '' + self.port = False + self.query = False + self.fragment = False + self.done = False + self.quoted = False + + if self.original_url: + self.remove_leading_chars() + + else: + raise URLError("Empty string given") + + if any(map(self.modified_url[:8].__contains__, ["//", "%3A", "%3a"])): + # The URL seems to have a scheme indicated by presence of "//" or "%3A" + self.scheme_check() + + host_end_position = -1 + special_chars = ("/", "?", "#") # Any one of these states the end of the host / authority part in a URL + + for char in special_chars: + try: + host_end_position = self.modified_url[self.base:].index(char) + break # index for the end of the part found, breaking loop + except ValueError: + continue # no reserved char found, URL has no path, query or fragment parts. 
+ + try: + if "@" in self.modified_url[:host_end_position]: + # Checks if url has '@' sign in its authority part + + self.user_info_check() + + except ValueError: + # No '@' in url at all + pass + + self.host_check() + + if not self.done and self.port: + self.port_check() + + if not self.done: + self.path_check() + + if not self.done and self.query: + self.query_check() + + if not self.done and self.fragment: + self.fragment_check() + + while '%' in self.output: + unquoted = urllib.parse.unquote(self.output) + if unquoted != self.output: + self.output = unquoted + else: + break + + def __str__(self): + return f"{self.output}" + + def __repr__(self): + return f"{self.output}" + + def scheme_check(self): + """ + Parses and validates the scheme part of the URL, accepts ascii and "+", "-", "." according to standard. + """ + + index = self.base + scheme = '' + + while self.modified_url[index].isascii() or self.modified_url[index] in ("+", "-", "."): + + char = self.modified_url[index] + if char in self.sub_delims: + raise URLError(f"Invalid character {char} at position {index}") + + elif char == "%" or char == ":": + # The colon might appear as is or if the URL is quoted as "%3A" + + if char == "%": + # If % is present in the scheme it must be followed by "3A" to represent a colon (":") + + if self.modified_url[index + 1:index + 3].upper() != "3A": + raise URLError(f"Invalid character {char} at position {index}") + + else: + self.output += ":" + index += 3 + self.quoted = True + + if char == ":": + self.output += char + index += 1 + + if self.modified_url[index:index + 2] != "//": + # If URL has ascii chars and ':' with no '//' it is invalid + + raise URLError(f"Invalid character {char} at position {index}") + + else: + self.url.scheme = scheme + self.output += self.modified_url[index:index + 2] + self.base = index + 2 + + if self.base == len(self.modified_url): + raise URLError("Only scheme provided") + + return + + elif index == len(self.modified_url) - 1: + # Reached 
end of url and no ":" found (like "foo//") + + raise URLError('Invalid scheme') + + else: + # base is not incremented as it was incremented by 2 before + self.output += char + scheme += char + index += 1 + + def user_info_check(self): + """ + Parses and validates the user_info part of the URL. Will only accept a username, password isn't allowed. + """ + + index = self.base + user_info = "" + + if self.modified_url[index] == "@": + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + else: + while self.modified_url[index] not in ('@', '/', '?', '#', '[', ']'): + self.output += self.modified_url[index] + user_info += self.modified_url[index] + index += 1 + + if self.modified_url[index] == '@': + self.output += self.modified_url[index] + self.url.user_info = user_info + self.base = index + 1 + return + + else: + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + def host_check(self): + """ + Parses and validates the host part of the URL. The domain must be valid, either a domain, IPv4 or an + IPv6 with square brackets. 
+ """ + + index = self.base + host: Any = '' + is_ip = False + + while index < len(self.modified_url) and self.modified_url[index] not in ('/', '?', '#'): + + if self.modified_url[index] in self.sub_delims: + if self.modified_url[index] in self.brackets: + # Just a small trick to stop the parsing if a bracket is found + index = len(self.modified_url) + self.check_done(index) + + else: + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + elif self.modified_url[index] == "%" and not self.hex_check(index): + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + elif self.modified_url[index] == ":" and not self.inside_brackets: + # ":" are only allowed if host is ipv6 in which case inside_brackets equals True + if index == len(self.modified_url) - 1: + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + elif index <= 4: + # This might be an IPv6 with no scheme + self.inside_brackets = True + self.output = f"[{self.output}" # Reading the bracket that was removed by the cleaner + + else: + self.port = True + self.output += self.modified_url[index] + index += 1 + self.base = index + self.url.hostname = host + return # Going back to main to handle port part + + elif self.modified_url[index] == "[": + if not self.inside_brackets and index == self.base: + # if index==base we're at the first char of the host in which "[" is ok + self.output += self.modified_url[index] + index += 1 + self.inside_brackets = True + + else: + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + elif self.modified_url[index] == "]": + + if not self.inside_brackets: + if self.check_domain(host) and all(char in self.brackets for char in self.modified_url[index:]): + # Domain is valid with trailing "]" and brackets, the formatter will remove the extra chars + self.done = True + return + + else: + raise URLError(f"Invalid character {self.modified_url[index]} at 
position {index}") + + else: + try: + ip = ipaddress.ip_address(host) + is_ip = True + + except ValueError: + raise URLError(f"Only IPv6 is allowed within square brackets, not {host}") + + if self.inside_brackets and ip.version == 6: + self.output += self.modified_url[index] + index += 1 + self.inside_brackets = False + break + + raise URLError(f"Only IPv6 is allowed within square brackets, not {host}") + + else: + self.output += self.modified_url[index] + host += self.modified_url[index] + index += 1 + + if not is_ip: + try: + ip = ipaddress.ip_address(host) + + if ip.version == 6 and not self.output.endswith(']'): + self.output = f"{self.output}]" # Adding a closing square bracket for IPv6 + + except ValueError: + self.check_domain(host) + + self.url.hostname = host + self.check_done(index) + + def port_check(self): + """ + Parses and validates the port part of the URL, accepts only digits. Index is starting after ":" + """ + + index = self.base + port = "" + + while index < len(self.modified_url) and self.modified_url[index] not in ('/', '?', '#'): + if self.modified_url[index].isdigit(): + self.output += self.modified_url[index] + port += self.modified_url[index] + index += 1 + + else: + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + self.url.port = port + self.check_done(index) + + def path_check(self): + """ + Parses and validates the path part of the URL. 
+ """ + + index = self.base + path = "" + + while index < len(self.modified_url) and self.modified_url[index] not in ('?', '#'): + index, char = self.check_valid_character(index) + path += char + + if self.check_done(index): + self.url.path = path + self.output += path + return + + if self.modified_url[index] == "?": + self.query = True + + elif self.modified_url[index] == "#": + self.fragment = True + + self.output += path + self.output += self.modified_url[index] + index += 1 + self.base = index + self.url.path = path + + def query_check(self): + """ + Parses and validates the query part of the URL. The query starts after a "?". + """ + index = self.base + query = '' + + while index < len(self.modified_url) and self.modified_url[index] != '#': + index, char = self.check_valid_character(index) + query += char + + self.url.query = query + self.output += query + + if self.check_done(index): + return + + elif self.modified_url[index] == "#": + self.output += self.modified_url[index] + index += 1 + self.base = index + self.fragment = True + + def fragment_check(self): + """ + Parses and validates the fragment part of the URL, will not allow gen and sub delims unless encoded + """ + + index = self.base + fragment = "" + + while index < len(self.modified_url): + index, char = self.check_valid_character(index) + fragment += char + + self.url.fragment = fragment + self.output += fragment + + def check_valid_character(self, index: int) -> tuple[int, str]: + """ + Checks the validity of a character passed by the main formatter + + Args: + index: the index of the character within the URL + + Returns: + returns the new index after incrementation and the part of the URL that was checked + + """ + + part = "" + char = self.modified_url[index] + + if char == "%": + if not self.hex_check(index): + raise URLError(f"Invalid character {char} at position {index}") + + else: + part += char + index += 1 + + elif char in self.brackets: + # char is a type of bracket or quotation mark + + 
if index == len(self.modified_url) - 1 and not self.inside_brackets: + # Edge case of a bracket or quote at the end of the URL but not part of it + return len(self.modified_url), part + + elif self.inside_brackets and char == self.bracket_pairs[self.opening_bracket]: + # If the char is a closing bracket check that it matches the opening one. + self.inside_brackets = False + part += char + index += 1 + + elif char in self.bracket_pairs: + # If the char is an opening bracket set `inside_brackets` flag to True + self.inside_brackets = True + self.opening_bracket = char + part += char + index += 1 + + else: + # The char is a closing bracket but there was no opening one. + return len(self.modified_url), part + + elif char == '\\': + # Edge case of the url ending with an escape char + return len(self.modified_url), part + + elif not char.isalnum() and not self.check_codepoint_validity(char): + raise URLError(f"Invalid character {self.modified_url[index]} at position {index}") + + else: + part += char + index += 1 + + return index, part + + @staticmethod + def check_codepoint_validity(char: str) -> bool: + """ + Checks if a character from the URL is a valid code point, see + https://infra.spec.whatwg.org/#code-points for more information. # disable-secrets-detection + + Args: + char (str): A character derived from the URL + + Returns: + bool: Is the character a valid code point. 
+ """ + url_code_points = ("!", "$", "&", "\"", "(", ")", "*", "+", ",", "-", ".", "/", ":", ";", "=", "?", "@", + "_", "~") + unicode_code_points = {"start": "\u00A0", "end": "\U0010FFFD"} + surrogate_characters = {"start": "\uD800", "end": "\uDFFF"} + non_characters = {"start": "\uFDD0", "end": "\uFDEF"} + + if surrogate_characters["start"] <= char <= surrogate_characters["end"]: + return False + + elif non_characters["start"] <= char <= non_characters["end"]: + return False + + elif char in url_code_points: + return True + + return unicode_code_points['start'] <= char <= unicode_code_points['end'] + + def check_domain(self, host: str) -> bool: + """ + Checks if the domain is a valid domain (has at least 1 dot and a tld >= 2) + + Args: + host: The host string as extracted by the formatter + + Returns: + True if the domain is valid + + Raises: + URLError if the domain is invalid + """ + + if host.endswith("."): + host = host.rstrip(".") + + if host.count(".") < 1: + raise URLError(f"Invalid domain {host}") + + elif len(host.split(".")[-1]) < 2: + raise URLError(f"Invalid tld for {host}") + + elif not self.no_fetch_extract(host).suffix: + raise URLError(f"Invalid tld for {host}") + + else: + return True + + def hex_check(self, index: int) -> bool: + """ + Checks the next two chars in the url are hex digits + + Args: + index: points to the position of the % character, used as a pointer to chars. + + Returns: + True if %xx is a valid hexadecimal code. + + Raises: + ValueError if the chars after % are invalid + """ + + try: + int(self.modified_url[index + 1:index + 3], 16) + return True + + except ValueError: + return False + + def check_done(self, index: int) -> bool: + """ + Checks if the validator already went over the URL and nothing is left to check. + + Args: + index: The current index of the pointer + + Returns: + True if the entire URL has been verified False if not. 
+ """ + + if index == len(self.modified_url): + # End of inputted url, no need to test further + self.done = True + return True + + elif self.modified_url[index] == "/": + self.output += self.modified_url[index] + index += 1 + + self.base = index + return False + + def remove_leading_chars(self): + """ + Will remove all leading chars of the following ("\"", "'", "[", "]", "{", "}", "(", ")", ",") + from the URL. + """ + + beginning = 0 + end = -1 + + in_brackets = True + + while in_brackets: + try: + if self.bracket_pairs[self.modified_url[beginning]] == self.modified_url[end]: + beginning += 1 + end -= 1 + + else: + in_brackets = False + + except KeyError: + in_brackets = False + + while self.modified_url[beginning] in self.brackets: + beginning += 1 + + if end == -1: + self.modified_url = self.modified_url[beginning:] + + else: + self.modified_url = self.modified_url[beginning:end + 1] + + +class URLFormatter: + + # URL Security Wrappers + ATP_regex = re.compile('.*?[.]safelinks[.]protection[.](?:outlook|office365)[.](?:com|us)/.*?[?]url=(.*?)&', re.I) + fireeye_regex = re.compile('.*?fireeye[.]com.*?&u=(.*)', re.I) + proofpoint_regex = re.compile('(?i)(?:proofpoint.com/v[1-2]/(?:url\?u=)?(.+?)(?:&|&d|$)|' + 'https?(?::|%3A)//urldefense[.]\w{2,3}/v3/__(.+?)(?:__;|$))') + trendmicro_regex = re.compile('.*?trendmicro\.com(?::443)?/wis/clicktime/.*?/?url==3d(.*?)&', # disable-secrets-detection + re.I) + + # Scheme slash fixer + scheme_fix = re.compile("https?(:[/|\\\]*)") + + def __init__(self, original_url): + """ + Main class for formatting a URL + + Args: + original_url: The original URL in lower case + + Raises: + URLError if an exception occurs + """ + + self.original_url = original_url + self.output = '' + + url = self.correct_and_refang_url(self.original_url) + url = self.strip_wrappers(url) + url = self.correct_and_refang_url(url) + + try: + self.output = URLCheck(url).output + + except URLError: + raise + + def __repr__(self): + return f"{self.output}" + + 
def __str__(self): + return f"{self.output}" + + @staticmethod + def strip_wrappers(url: str) -> str: + """ + Allows for stripping of multiple safety wrappers of URLs + + Args: + url: The original wrapped URL + + Returns: + The URL without wrappers + """ + + wrapper = True + + while wrapper: + # Will strip multiple wrapped URLs, wrappers are finite the loop will stop once all wrappers were removed + + if "%3A" in url[:8].upper(): + # If scheme has %3A URL is probably quoted and should be unquoted + url = urllib.parse.unquote(url) + + if URLFormatter.fireeye_regex.match(url): + url = URLFormatter.fireeye_regex.findall(url)[0] + + elif URLFormatter.trendmicro_regex.match(url): + url = URLFormatter.trendmicro_regex.findall(url)[0] + + elif URLFormatter.ATP_regex.match(url): + url = URLFormatter.ATP_regex.findall(url)[0] + + elif URLFormatter.proofpoint_regex.findall(url): + url = URLFormatter.extract_url_proofpoint(URLFormatter.proofpoint_regex.findall(url)[0]) + + else: + wrapper = False + + return url + + @staticmethod + def extract_url_proofpoint(url: str) -> str: + """ + Extracts the domain from the Proofpoint wrappers using a regex + + Args: + url: The proofpoint wrapped URL + + Returns: + Unquoted extracted URL as a string + """ + + if url[0]: + # Proofpoint v1 and v2 + return urllib.parse.unquote(url[0].replace("-", "%").replace("_", "/")) + + else: + # Proofpoint v3 + return urllib.parse.unquote(url[1]) + + @staticmethod + def correct_and_refang_url(url: str) -> str: + """ + Refangs URL and corrects its scheme + + Args: + url: The original URL + + Returns: + Refnaged corrected URL + """ + + schemas = re.compile("(meow|hxxp)", re.IGNORECASE) + url = url.replace("[.]", ".") + url = url.replace("[:]", ":") + lower_url = url.lower() + if lower_url.startswith(('hxxp', 'meow')): + url = re.sub(schemas, "http", url, count=1) + + def fix_scheme(match: Match) -> str: + return re.sub(":(\\\\|/)*", "://", match.group(0)) + + return URLFormatter.scheme_fix.sub(fix_scheme, 
url) + + +def _is_valid_cidr(cidr: str) -> bool: + """ + Will check if "url" is a valid CIDR in order to ignore it + Args: + cidr: the suspected input + + Returns: + True if inout is a valid CIDR + + """ + if not cidr[-1].isdigit(): # precaution incase the regex caught an extra char by mistake + cidr = cidr[:-1] + + try: + ipaddress.ip_network(cidr) + return True + except ValueError: + return False + + +def format_urls(raw_urls: list[str]) -> list[str]: + formatted_urls: List[str] = [] + + for url in raw_urls: + formatted_url = '' + + if _is_valid_cidr(url): + # If input is a valid CIDR formatter will ignore it to let it become a CIDR + formatted_urls.append('') + continue + + try: + formatted_url = URLFormatter(url).output + + except URLError: + demisto.debug(traceback.format_exc()) + + except Exception: + demisto.debug(traceback.format_exc()) + + finally: + formatted_urls.append(formatted_url) + return formatted_urls diff --git a/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule.yml b/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule.yml new file mode 100644 index 000000000000..0fe2bcad15cf --- /dev/null +++ b/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule.yml @@ -0,0 +1,16 @@ +comment: Common code for url formatting. 
+commonfields: + id: FormatURLApiModule + version: -1 +name: FormatURLApiModule +script: '-' +subtype: python3 +tags: +- infra +- server +timeout: 0s +type: python +dockerimage: demisto/python3:3.10.12.66339 +tests: +- No Tests +fromversion: 5.0.0 diff --git a/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule_test.py b/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule_test.py new file mode 100644 index 000000000000..d18483685bdf --- /dev/null +++ b/Packs/ApiModules/Scripts/FormatURLApiModule/FormatURLApiModule_test.py @@ -0,0 +1,397 @@ +import pytest +from FormatURLApiModule import * + + +TEST_URL_HTTP = 'http://www.test.com' # disable-secrets-detection +TEST_URL_HTTPS = 'https://www.test.com' # disable-secrets-detection +TEST_URL_INNER_HXXP = 'http://www.testhxxp.com' # disable-secrets-detection + +NOT_FORMAT_TO_FORMAT = [ # Start of http:/ replacements. + ('http:/www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('https:/www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('http:\\\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('https:\\\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('http:\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('https:\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('http:www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('https:www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + # End of http/s replacements. + + # Start of hxxp/s replacements. 
+ ('hxxp:/www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('hxxps:/www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('hXXp:/www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('hXXps:/www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('hxxp:/www.testhxxp.com', 'http://www.testhxxp.com'), # disable-secrets-detection + ('hXxp:/www.testhxxp.com', 'http://www.testhxxp.com'), # disable-secrets-detection + + + ('hxxp:\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('hxxps:\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('hXXp:\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('hXXps:\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('hxxps:/www.testhxxp.com', 'https://www.testhxxp.com'), # disable-secrets-detection + + ('hxxp:\\\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('hxxps:\\\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('hXXp:\\\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('hXXps:\\\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + # End of hxxp/s replacements. + + # start of meow/s replacements. + ('meow:/www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('meows:/www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('meow:\\\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('meows:\\\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('meow:\\www.test.com', TEST_URL_HTTP), # disable-secrets-detection + ('meow:\\www.meow.com', 'http://www.meow.com'), # disable-secrets-detection + ('meows:\\www.test.com', TEST_URL_HTTPS), # disable-secrets-detection + ('meows:\\www.meow.com', 'https://www.meow.com'), # disable-secrets-detection + # end of meow/s replacements. + + # Start of Sanity test, no replacement should be done. + (TEST_URL_HTTP, TEST_URL_HTTP), + (TEST_URL_HTTPS, TEST_URL_HTTPS), + # End of Sanity test, no replacement should be done. 
+] + +BRACKETS_URL_TO_FORMAT = [ + ('{[https://test1.test-api.com/test1/test2/s.testing]}', # disable-secrets-detection + 'https://test1.test-api.com/test1/test2/s.testing'), # disable-secrets-detection + ('"https://test1.test-api.com"', 'https://test1.test-api.com'), # disable-secrets-detection + ('[[https://test1.test-api.com]]', 'https://test1.test-api.com'), # disable-secrets-detection + ('[https://www.test.com]', 'https://www.test.com'), # disable-secrets-detection + ('https://www.test.com]', 'https://www.test.com'), # disable-secrets-detection + ('[https://www.test.com', 'https://www.test.com'), # disable-secrets-detection + ('[[https://www.test.com', 'https://www.test.com'), # disable-secrets-detection + ('\'https://www.test.com/test\'', 'https://www.test.com/test'), # disable-secrets-detection + ('\'https://www.test.com/?a=\'b\'\'', 'https://www.test.com/?a=\'b\''), # disable-secrets-detection +] + +ATP_REDIRECTS = [ + ('https://na01.safelinks.protection.outlook.com/?url=https%3A%2F%2Foffice.memoriesflower.com' # disable-secrets-detection + '%2FPermission%2Foffice.php&data=01%7C01%7Cdavid.levin%40mheducation.com' # disable-secrets-detection + '%7C0ac9a3770fe64fbb21fb08d50764c401%7Cf919b1efc0c347358fca0928ec39d8d5%7C0&sdata=PEoDOerQnha' # disable-secrets-detection + '%2FACafNx8JAep8O9MdllcKCsHET2Ye%2B4%3D&reserved=0', # disable-secrets-detection + 'https://office.memoriesflower.com/Permission/office.php'), # disable-secrets-detection + ('https://na01.safelinks.protection.outlook.com/?url=https%3A//urldefense.com/v3/__' # disable-secrets-detection + 'https%3A//google.com%3A443/search%3Fq%3Da%2Atest%26gs%3Dps__%3BKw%21-612Flbf0JvQ3kNJkRi5Jg&', # disable-secrets-detection + 'https://google.com:443/search?q=a*test&gs=ps'), # disable-secrets-detection + ('https://na01.safelinks.protection.outlook.com/?url=https%3A//urldefense.com/v3/__' # disable-secrets-detection + 'hxxps%3A//google.com%3A443/search%3Fq%3Da%2Atest%26gs%3Dps__%3BKw%21-612Flbf0JvQ3kNJkRi5Jg&', # 
disable-secrets-detection + 'https://google.com:443/search?q=a*test&gs=ps'), # disable-secrets-detection + ('http://nam12.safelinks.protection.outlook.com/' # disable-secrets-detection + '?url=http%3A%2F%2Fi.ms00.net%2Fsubscribe%3Fserver_action%3D' # disable-secrets-detection + 'Unsubscribe%26list%3Dvalintry2%26sublist%3D*%26msgid%3D1703700099.20966' # disable-secrets-detection + '%26email_address%3Dpaulameixner%2540curo.com&data=05%7C02%7Cpaulameixner%40curo.com%7C' # disable-secrets-detection + '93f0eea20f1c47350eb508dc07b40542%7C2dc14abb79414377a7d259f436e42867' # disable-secrets-detection + '%7C1%7C0%7C638393716982915257%7C' # disable-secrets-detection + 'Unknown%7CTWFpbGZsb3d8eyJWIjoiMC4wLjAwMDAiLCJQIjoiV2luMzIiLCJBTiI6Ik1haWwiLCJXVCI6Mn0%3D%7C' # disable-secrets-detection + '3000%7C%7C%7C&sdata=%2FwfuIapNXRbZBgLVK651uTH%2FwXrSZFqwdvhvWK6Azwk%3D&reserved=0', # disable-secrets-detection + 'http://i.ms00.net/subscribe?server_action=Unsubscribe&list=valintry2&' # disable-secrets-detection + 'sublist=*&msgid=1703700099.20966' # disable-secrets-detection + '&email_address=paulameixner@curo.com'), # disable-secrets-detection + ('hxxps://nam10.safelinks.protection.outlook.com/ap/w-59523e83/?url=hxxps://test.com/test&data=', + 'https://test.com/test'), + ('hxxps://nam10.safelinks.protection.office365.us/ap/w-59523e83/?url=hxxps://test.com/test&data=', + 'https://test.com/test') +] + +PROOF_POINT_REDIRECTS = [ + ('https://urldefense.proofpoint.com/v2/url?u=https-3A__example.com_something.html', # disable-secrets-detection + 'https://example.com/something.html'), # disable-secrets-detection + ('https://urldefense.proofpoint.com/v2/url?' 
# disable-secrets-detection + 'u=http-3A__links.mkt3337.com_ctt-3Fkn-3D3-26ms-3DMzQ3OTg3MDQS1-26r' # disable-secrets-detection + '-3DMzkxNzk3NDkwMDA0S0-26b-3D0-26j-3DMTMwMjA1ODYzNQS2-26mt-3D1-26rt-3D0&d=DwMFaQ&c' # disable-secrets-detection + '=Vxt5e0Osvvt2gflwSlsJ5DmPGcPvTRKLJyp031rXjhg&r=MujLDFBJstxoxZI_GKbsW7wxGM7nnIK__qZvVy6j9Wc&m' # disable-secrets-detection + '=QJGhloAyfD0UZ6n8r6y9dF-khNKqvRAIWDRU_K65xPI&s=ew-rOtBFjiX1Hgv71XQJ5BEgl9TPaoWRm_Xp9Nuo8bk&e=', # disable-secrets-detection + 'http://links.mkt3337.com/ctt?kn=3&ms=MzQ3OTg3MDQS1&r=MzkxNzk3NDkwMDA0S0&b=0&j=' # disable-secrets-detection + 'MTMwMjA1ODYzNQS2&mt=1&rt=0'), # disable-secrets-detection + ('https://urldefense.proofpoint.com/v1/url?u=http://www.bouncycastle.org/' # disable-secrets-detection + '&k=oIvRg1%2BdGAgOoM1BIlLLqw%3D%3D%0A' # disable-secrets-detection + '&r=IKM5u8%2B%2F%2Fi8EBhWOS%2BqGbTqCC%2BrMqWI%2FVfEAEsQO%2F0Y%3D%0A&m' # disable-secrets-detection + '=Ww6iaHO73mDQpPQwOwfLfN8WMapqHyvtu8jM8SjqmVQ%3D%0A&s' # disable-secrets-detection + '=d3583cfa53dade97025bc6274c6c8951dc29fe0f38830cf8e5a447723b9f1c9a', # disable-secrets-detection + 'http://www.bouncycastle.org/'), # disable-secrets-detection + ('https://urldefense.com/v3/__https://google.com:443/' # disable-secrets-detection + 'search?q=a*test&gs=ps__;Kw!-612Flbf0JvQ3kNJkRi5Jg' # disable-secrets-detection + '!Ue6tQudNKaShHg93trcdjqDP8se2ySE65jyCIe2K1D_uNjZ1Lnf6YLQERujngZv9UWf66ujQIQ$', # disable-secrets-detection + 'https://google.com:443/search?q=a*test&gs=ps'), # disable-secrets-detection + ('https://urldefense.us/v3/__https://google.com:443/' # disable-secrets-detection + 'search?q=a*test&gs=ps__;Kw!-612Flbf0JvQ3kNJkRi5Jg' # disable-secrets-detection + '!Ue6tQudNKaShHg93trcdjqDP8se2ySE65jyCIe2K1D_uNjZ1Lnf6YLQERujngZv9UWf66ujQIQ$', # disable-secrets-detection + 'https://google.com:443/search?q=a*test&gs=ps') # disable-secrets-detection +] + +FIREEYE_REDIRECT = [ + ('https://protect2.fireeye.com/v1/url?' 
# disable-secrets-detection + 'k=00bf92e9-5f24adeb-00beb0cd-0cc47aa88f82-a1f32e4f84d91cbe&q=1' # disable-secrets-detection + '&e=221919da-9d68-429a-a70e-9d8d836ca107&u=https%3A%2F%2Fwww.facebook.com%2FNamshiOfficial', # disable-secrets-detection + 'https://www.facebook.com/NamshiOfficial'), # disable-secrets-detection +] + +TRENDMICRO_REDIRECT = [ + ('https://imsva91-ctp.trendmicro.com:443/wis/clicktime/v1/query?' # disable-secrets-detection + 'url==3Dhttp%3a%2f%2fclick.sanantonioshoemakers.com' # disable-secrets-detection + '%2f%3fqs%3dba654fa7d9346fec1b=3fa6c55906d045be350d0ee6e3ed' # disable-secrets-detection + 'c4ff33ef33eacb79b79602f5aaf719ee16c3d24e8489293=4d3&' # disable-secrets-detection + 'umid=3DB8AB568B-E738-A205-9C9E-ECD7B0A0383F&auth==3D00e18db2b3f9ca3ba6337946518e0b003516e16e-' # disable-secrets-detection + '5a8d41640e706acd29c760ae7a8cd40=f664d6489', # disable-secrets-detection + 'http://click.sanantonioshoemakers.com/?qs=ba654fa7d9346fec1b=' # disable-secrets-detection + '3fa6c55906d045be350d0ee6e3edc4ff33ef33eacb' # disable-secrets-detection + '79b79602f5aaf719ee16c3d24e8489293=4d3'), # disable-secrets-detection +] + +FORMAT_USERINFO = [ + ('https://user@domain.com', 'https://user@domain.com') # disable-secrets-detection +] + +FORMAT_PORT = [ + ('www.test.com:443/path/to/file.html', 'www.test.com:443/path/to/file.html'), # disable-secrets-detection +] + +FORMAT_IPv4 = [ + ('https://1.2.3.4/path/to/file.html', 'https://1.2.3.4/path/to/file.html'), # disable-secrets-detection + ('1.2.3.4/path', '1.2.3.4/path'), # disable-secrets-detection + ('1.2.3.4/path/to/file.html', '1.2.3.4/path/to/file.html'), # disable-secrets-detection + ('http://142.42.1.1:8080/', 'http://142.42.1.1:8080/'), # disable-secrets-detection + ('http://142.42.1.1:8080', 'http://142.42.1.1:8080'), # disable-secrets-detection + ('http://223.255.255.254', 'http://223.255.255.254'), # disable-secrets-detection +] + +FORMAT_IPv6 = [ + 
('[http://[2001:db8:3333:4444:5555:6666:7777:8888]]', # disable-secrets-detection + 'http://[2001:db8:3333:4444:5555:6666:7777:8888]'), # disable-secrets-detection + ('[2001:db8:3333:4444:5555:6666:7777:8888]', # disable-secrets-detection + '[2001:db8:3333:4444:5555:6666:7777:8888]'), # disable-secrets-detection + ('2001:db8:3333:4444:5555:6666:7777:8888', # disable-secrets-detection + '[2001:db8:3333:4444:5555:6666:7777:8888]'), # disable-secrets-detection +] + +FORMAT_PATH = [ + ('https://test.co.uk/test.html', 'https://test.co.uk/test.html'), # disable-secrets-detection + ('www.test.com/check', 'www.test.com/check'), # disable-secrets-detection + ('https://test.com/Test\\"', 'https://test.com/Test'), # disable-secrets-detection + ('https://www.test.com/a\\', 'https://www.test.com/a'), # disable-secrets-detection +] + +FORMAT_QUERY = [ + ('www.test.test.com/test.html?paramaters=testagain', # disable-secrets-detection + 'www.test.test.com/test.html?paramaters=testagain'), # disable-secrets-detection + ('https://www.test.test.com/test.html?paramaters=testagain', # disable-secrets-detection + 'https://www.test.test.com/test.html?paramaters=testagain'), # disable-secrets-detection + ('https://test.test.com/v2/test?test&test=[test]test', # disable-secrets-detection + 'https://test.test.com/v2/test?test&test=[test]test'), # disable-secrets-detection + ('https://test.dev?email=some@email.addres', # disable-secrets-detection + 'https://test.dev?email=some@email.addres'), # disable-secrets-detection +] + +FORMAT_FRAGMENT = [ + ('https://test.com#fragment3', 'https://test.com#fragment3'), # disable-secrets-detection + ('http://_23_11.redacted.com./#redactedredactedredacted', # disable-secrets-detection + 'http://_23_11.redacted.com./#redactedredactedredacted'), # disable-secrets-detection + ('https://test.com?a=b#fragment3', 'https://test.com?a=b#fragment3'), # disable-secrets-detection + ('https://test.com/?a=b#fragment3', 'https://test.com/?a=b#fragment3'), # 
disable-secrets-detection + ('https://test.dev#fragment', # disable-secrets-detection + 'https://test.dev#fragment') # disable-secrets-detection +] + +FORMAT_REFANG = [ + ('hxxps://www[.]cortex-xsoar[.]com', 'https://www.cortex-xsoar.com'), # disable-secrets-detection + ('https[:]//www.test.com/foo', 'https://www.test.com/foo'), # disable-secrets-detection + ('https[:]//www[.]test[.]com/foo', 'https://www.test.com/foo'), # disable-secrets-detection +] + +FORMAT_NON_ASCII = [ + ('http://☺.damowmow.com/', 'http://☺.damowmow.com/'), # disable-secrets-detection + ('http://ötest.com/', 'http://ötest.com/'), # disable-secrets-detection + ('https://testö.com/test.html', 'https://testö.com/test.html'), # disable-secrets-detection + ('www.testö.com/test.aspx', 'www.testö.com/test.aspx'), # disable-secrets-detection + ('https://www.teöst.com/', 'https://www.teöst.com/'), # disable-secrets-detection + ('https://www.test.se/Auth/?&rUrl=https://test.com/wp-images/amclimore@test.com', # disable-secrets-detection + 'https://www.test.se/Auth/?&rUrl=https://test.com/wp-images/amclimore@test.com'), # disable-secrets-detection + ('test.com/#/?q=(1,2)', "test.com/#/?q=(1,2)"), # disable-secrets-detection +] + +FORMAT_PUNYCODE = [ + ('http://xn--t1e2s3t4.com/testagain.aspx', 'http://xn--t1e2s3t4.com/testagain.aspx'), # disable-secrets-detection + ('https://www.xn--t1e2s3t4.com', 'https://www.xn--t1e2s3t4.com'), # disable-secrets-detection +] + +FORMAT_HEX = [ + ('ftps://foo.bar/baz%26bar', 'ftps://foo.bar/baz&bar'), # disable-secrets-detection + ('foo.bar/baz%26bar', 'foo.bar/baz&bar'), # disable-secrets-detection + ('https://foo.com/?key=foo%26bar', 'https://foo.com/?key=foo&bar'), # disable-secrets-detection + ('https%3A//foo.com/?key=foo%26bar', 'https://foo.com/?key=foo&bar'), # disable-secrets-detection +] + +FAILS = [ + ('[http://2001:db8:3333:4444:5555:6666:7777:8888]', # disable-secrets-detection + pytest.raises(URLError)), # IPv6 must have square brackets + 
('http://142.42.1.1:aaa8080', # disable-secrets-detection + pytest.raises(URLError)), # invalid port + ('http://142.42.1.1:aaa', # disable-secrets-detection + pytest.raises(URLError)), # port contains non digits + ('https://test.com#fragment3#fragment3', # disable-secrets-detection + pytest.raises(URLError)), # Only one fragment allowed + ('ftps://foo.bar/baz%GG', # disable-secrets-detection + pytest.raises(URLError)), # Invalid hex code in path + ('https://www.%gg.com/', # disable-secrets-detection + pytest.raises(URLError)), # Non valid hexadecimal value in host + ('', # disable-secrets-detection + pytest.raises(URLError)), # Empty string + ('htt$p://test.com/', # disable-secrets-detection + pytest.raises(URLError)), # Invalid character in scheme + ('https://', # disable-secrets-detection + pytest.raises(URLError)), # Only scheme + ('https://test@/test', # disable-secrets-detection + pytest.raises(URLError)), # No host data, only scheme and user info + ('https://www.te$t.com/', # disable-secrets-detection + pytest.raises(URLError)), # Bad chars in host + ('https://www.[test].com/', # disable-secrets-detection + pytest.raises(URLError)), # Invalid square brackets + ('https://www.te]st.com/', # disable-secrets-detection + pytest.raises(URLError)), # Square brackets closing without opening + ('https://[192.168.1.1]', # disable-secrets-detection + pytest.raises(URLError)), # Only IPv6 allowed in square brackets + ('https://[www.test.com]', # disable-secrets-detection + pytest.raises(URLError)), # Only IPv6 allowed in square brackets + ('https://www/test/', # disable-secrets-detection + pytest.raises(URLError)), # invalid domain in host section (no tld) + ('https://www.t/', # disable-secrets-detection + pytest.raises(URLError)), # invalid domain in host section (single letter tld) + ('foo//', # disable-secrets-detection + pytest.raises(URLError)), # invalid input + ('test.test/test', # disable-secrets-detection + pytest.raises(URLError)), # invalid tld +] + 
+REDIRECT_TEST_DATA = ATP_REDIRECTS + PROOF_POINT_REDIRECTS + FIREEYE_REDIRECT + TRENDMICRO_REDIRECT + +FORMAT_TESTS = (BRACKETS_URL_TO_FORMAT + FORMAT_USERINFO + FORMAT_PORT + FORMAT_IPv4 + FORMAT_IPv6 + FORMAT_PATH + FORMAT_QUERY + + FORMAT_FRAGMENT + FORMAT_NON_ASCII + FORMAT_PUNYCODE + FORMAT_HEX) + +FORMAT_URL_TEST_DATA = NOT_FORMAT_TO_FORMAT + FORMAT_TESTS + + +class TestFormatURL: + @pytest.mark.parametrize('non_formatted_url, expected', NOT_FORMAT_TO_FORMAT) + def test_replace_protocol(self, non_formatted_url: str, expected: str): + """ + Given: + - non_formatted_url: A URL. + + When: + - Replacing protocol to http:// or https://. + + Then: + - Ensure for every expected protocol given, it is replaced with the expected value. + """ + url = URLFormatter('https://www.test.com/') + assert url.correct_and_refang_url(non_formatted_url) == expected + + @pytest.mark.parametrize('non_formatted_url, expected', FORMAT_HEX) + def test_hex_chars(self, non_formatted_url: str, expected: str): + """ + Given: + - non_formatted_url: A URL. + + When: + - Replacing protocol to http:// or https://. + + Then: + - Ensure for every expected protocol given, it is replaced with the expected value. + """ + url = URLCheck(non_formatted_url) + hex = non_formatted_url.find('%') + assert url.hex_check(hex) + + cidr_strings = [ + ("192.168.0.0/16", True), # Valid CIDR + ("192.168.0.0/16.", True), # Valid CIDR with an extra char caught by the regex + ("192.168.0.1/16", False), # Invalid CIDR + ("192.168.0.1/16.", False), # Invalid CIDR with an extra char caught by the regex + ] + + @pytest.mark.parametrize('input, expected', cidr_strings) + def test_is_valid_cidr(self, input: str, expected: str): + from FormatURLApiModule import _is_valid_cidr + """ + Given: + - non_formatted_url: A CIDR input. + + When: + - Regex caught a string with a CIDR structure. + + Then: + - Ensure the formatter avoids valid CIDRs. 
+ """ + assert _is_valid_cidr(input) == expected + + @pytest.mark.parametrize('url_, expected', FORMAT_URL_TEST_DATA) + def test_format_url(self, url_: str, expected: str): + """ + Given: + - URL. + + When: + - Given URL needs to be formatted. + + Then: + - Ensure URL is formatted as expected + """ + + assert URLFormatter(url_).__str__() == expected + + @pytest.mark.parametrize('url_, expected', FAILS) + def test_exceptions(self, url_: str, expected): + """ + Checks the formatter raises the correct exception. + """ + + with expected: + assert URLFormatter(url_) is not None + + @pytest.mark.parametrize('url_, expected', REDIRECT_TEST_DATA) + def test_wrappers(self, url_: str, expected: str): + """ + Given: + - URL with redirect URL Proof Point v2. + + When: + - Given URL with redirect URL is valid. + + Then: + - Ensure redirected URL is returned. + """ + + assert URLFormatter(url_).__str__() == expected + + @pytest.mark.parametrize('url_, expected', [ + ('[https://urldefense.com/v3/__https://google.com:443/search?66ujQIQ$]', # disable-secrets-detection + 'https://google.com:443/search?66ujQIQ$'), # disable-secrets-detection + ('(https://urldefense.us/v3/__https://google.com:443/searchERujngZv9UWf66ujQIQ$)', # disable-secrets-detection + 'https://google.com:443/searchERujngZv9UWf66ujQIQ$'), # disable-secrets-detection + ('[https://testURL.com)', 'https://testURL.com'), # disable-secrets-detection + ('[https://testURL.com', 'https://testURL.com'), # disable-secrets-detection + ('[(https://testURL.com)]', 'https://testURL.com') # disable-secrets-detection + ]) + def test_remove_special_chars_from_start_and_end_of_url(self, url_, expected): + """ + Given: + - A URL to format. + + When: + - executing remove_special_chars_from_start_and_end_of_url function. + + Then: + - Ensure formatted URL is returned. 
+ """ + assert URLFormatter(url_).__str__() == expected + + def test_url_class(self): + url = URLType('https://www.test.com') + + assert url.raw == 'https://www.test.com' + assert url.__str__() == ("Scheme = \nUser_info = \nHostname = \nPort = \n" + "Path = \nQuery = \nFragment = ") diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py index 352307a3b346..a2245ebf7675 100644 --- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py +++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.py @@ -99,7 +99,7 @@ class Resources: 'geo-us': 'https://login.microsoftonline.com', 'geo-eu': 'https://login.microsoftonline.com', 'geo-uk': 'https://login.microsoftonline.com', - 'gcc': 'https://login.microsoftonline.us', + 'gcc': 'https://login.microsoftonline.com', 'gcc-high': 'https://login.microsoftonline.us', 'dod': 'https://login.microsoftonline.us', } diff --git a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml index 67c0b37d2236..b487e679c8e7 100644 --- a/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml +++ b/Packs/ApiModules/Scripts/MicrosoftApiModule/MicrosoftApiModule.yml @@ -13,7 +13,7 @@ system: true scripttarget: 0 dependson: {} timeout: 0s -dockerimage: demisto/crypto:1.0.0.61689 +dockerimage: demisto/crypto:1.0.0.94037 fromversion: 5.0.0 tests: - No test diff --git a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py index 2d55b37184a8..02d1d18bfbb4 100644 --- a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py +++ b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule.py @@ -357,7 +357,7 @@ def create_manifest_entry(self, xsoar_indicator: dict, xsoar_type: str) -> dict: entry['version'] = parse(xsoar_indicator.get('modified')).strftime(STIX_DATE_FORMAT) # 
type: ignore[arg-type] return entry - def create_stix_object(self, xsoar_indicator: dict, xsoar_type: str, extensions_dict: dict = {}) -> tuple: + def create_stix_object(self, xsoar_indicator: dict, xsoar_type: str, extensions_dict: dict = {}) -> tuple[dict, dict, dict]: """ Args: @@ -379,7 +379,12 @@ def create_stix_object(self, xsoar_indicator: dict, xsoar_type: str, extensions_ is_sdo = True else: demisto.debug(f'No such indicator type: {xsoar_type} in stix format.') - return {}, {} + return {}, {}, {} + + indicator_value = xsoar_indicator.get("value") + if (stix_type == "file") and (get_hash_type(indicator_value) == "Unknown"): + demisto.debug(f"Skip indicator of type 'file' with value: '{indicator_value}', as it is not a valid hash.") + return {}, {}, {} created_parsed = parse(xsoar_indicator.get('timestamp')).strftime(STIX_DATE_FORMAT) # type: ignore[arg-type] @@ -399,7 +404,7 @@ def create_stix_object(self, xsoar_indicator: dict, xsoar_type: str, extensions_ stix_object['object_refs'] = [ref['objectstixid'] for ref in xsoar_indicator['CustomFields'].get('reportobjectreferences', [])] if is_sdo: - stix_object['name'] = xsoar_indicator.get('value') + stix_object['name'] = indicator_value stix_object = self.add_sdo_required_field_2_1(stix_object, xsoar_indicator) stix_object = self.add_sdo_required_field_2_0(stix_object, xsoar_indicator) else: diff --git a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py index 9fae74d3d4b3..efb58fedecec 100644 --- a/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py +++ b/Packs/ApiModules/Scripts/TAXII2ApiModule/TAXII2ApiModule_test.py @@ -1922,6 +1922,25 @@ def test_create_stix_object(xsoar_indicator, xsoar_type, expected_stix_object, e assert extensions_dict == {} +def test_create_stix_object_unknown_file_hash(): + """ + Given: + - A XSOAR indicator of type 'File' and the value is an invalid hash. 
+ When: + - Creating a stix object. + Then: + - Ensure the stix object is empty. + """ + cilent = XSOAR2STIXParser(server_version='2.1', fields_to_present={'name', 'type'}, types_for_indicator_sdo=[], + namespace_uuid=PAWN_UUID) + xsoar_indicator = {"value": "invalidhash"} + xsoar_type = FeedIndicatorType.File + stix_object, extension_definition, extensions_dict = cilent.create_stix_object(xsoar_indicator, xsoar_type) + assert stix_object == {} + assert extension_definition == {} + assert extensions_dict == {} + + def test_init_client_with_wrong_version(): """ Given: diff --git a/Packs/ApiModules/pack_metadata.json b/Packs/ApiModules/pack_metadata.json index bda3bda9fb32..b1efc15cfba7 100644 --- a/Packs/ApiModules/pack_metadata.json +++ b/Packs/ApiModules/pack_metadata.json @@ -2,7 +2,7 @@ "name": "ApiModules", "description": "API Modules", "support": "xsoar", - "currentVersion": "2.2.23", + "currentVersion": "2.2.24", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py index d003493aa5b1..e7a5a63429c7 100644 --- a/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py +++ b/Packs/ArcSightESM/Integrations/ArcSightESMv2/ArcSightESMv2.py @@ -2,13 +2,6 @@ from CommonServerPython import * from CommonServerUserPython import * -""" IMPORTS """ -from datetime import datetime -import requests -import base64 - -# disable insecure warnings -requests.packages.urllib3.disable_warnings() # type: ignore """ GLOBALS """ HEADERS = { @@ -620,9 +613,8 @@ def update_case(case_id, stage, severity): if not res.ok: demisto.debug(res.text) - return_error('Failed to get security update case {}. \nPlease make sure user have edit permissions,' - ' or case is unlocked. 
\nStatus Code: {}\nResponse Body: {}'.format(case_id, res.status_code, - res.text)) + return_error(f'Failed to get security update case {case_id}. \nPlease make sure user have edit permissions,' + f' or case is unlocked. \nStatus Code: {res.status_code}\nResponse Body: {res.text}') res_json = parse_json_response(res) if 'cas.updateResponse' in res_json and 'cas.return' in res_json.get('cas.updateResponse'): @@ -835,9 +827,7 @@ def entries_command(func): if not res.ok: demisto.debug(res.text) return_error("Failed to add entries. Please make sure to enter Active List resource ID" - "\nResource ID: {}\nStatus Code: {}\nRequest Body: {}\nResponse: {}".format(resource_id, - res.status_code, body, - res.text)) + f"\nResource ID: {resource_id}\nStatus Code: {res.status_code}\nRequest Body: {body}\nResponse: {res.text}") demisto.results("Success") @@ -944,7 +934,7 @@ def parse_json_response(response: requests.Response): 'Attempting to fix invalid escape sequences and parse the response again.') # Replace triple backslashes (where the last one doesn't escape anything) with two backslashes. - fixed_response_text = re.sub(r'[^\\]\\\\\\(?P[^\"\\])', r'\\\\\g', response.text) + fixed_response_text = re.sub(r'(? str: str: 'ok' if test passed, anything else will raise an exception and will fail the test. """ try: - client.fetch_by_aql_query('in:alerts', 1) + client.fetch_by_aql_query('in:alerts', 1, after=(datetime.now() - timedelta(minutes=1))) except Exception as e: raise DemistoException(f'Error in test-module: {e}') from e @@ -193,7 +200,7 @@ def test_module(client: Client) -> str: ''' HELPER FUNCTIONS ''' -def calculate_fetch_start_time(last_fetch_time: str | None, fetch_start_time: datetime | None): +def calculate_fetch_start_time(last_fetch_time: datetime | str | None, fetch_start_time: datetime | None): """ Calculates the fetch start time. 
There are three cases for fetch start time calculation: - Case 1: last_fetch_time exist in last_run, thus being prioritized (fetch-events / armis-get-events commands). @@ -203,7 +210,7 @@ def calculate_fetch_start_time(last_fetch_time: str | None, fetch_start_time: da page size of 'max_events' (fetch-events / armis-get-events commands). Args: - last_fetch_time (str | None): Last fetch time (from last run). + last_fetch_time (datetime | str | None): Last fetch time (from last run). fetch_start_time (datetime | None): Fetch start time. Raises: @@ -214,7 +221,11 @@ def calculate_fetch_start_time(last_fetch_time: str | None, fetch_start_time: da """ # case 1 if last_fetch_time: - last_fetch_datetime = arg_to_datetime(last_fetch_time) + if isinstance(last_fetch_time, str): + demisto.info(f"info-log: calculating_fetch_time for {last_fetch_time=}") + last_fetch_datetime = arg_to_datetime(last_fetch_time) + else: + last_fetch_datetime = last_fetch_time if not last_fetch_datetime: raise DemistoException(f'last_fetch_time is not a valid date: {last_fetch_time}') return last_fetch_datetime @@ -223,7 +234,7 @@ def calculate_fetch_start_time(last_fetch_time: str | None, fetch_start_time: da return fetch_start_time # case 3 else: - return None + return datetime.now() - timedelta(minutes=1) def are_two_datetime_equal_by_second(x: datetime, y: datetime): @@ -320,30 +331,39 @@ def fetch_by_event_type(client: Client, event_type: EVENT_TYPE, events: dict, ma fetch_start_time (datetime | None): Fetch start time. 
""" last_fetch_ids = f'{event_type.type}_last_fetch_ids' - last_fetch_time = f'{event_type.type}_last_fetch_time' + last_fetch_time_field = f'{event_type.type}_last_fetch_time' + last_fetch_next_field = f'{event_type.type}_last_fetch_next_field' demisto.debug(f'debug-log: handling event-type: {event_type.type}') - demisto.debug(f'debug-log: last run of type: {event_type.type} is: {last_run.get(last_fetch_time)}') - event_type_fetch_start_time = calculate_fetch_start_time(last_run.get(last_fetch_time), fetch_start_time) - - response = client.fetch_by_aql_query( + if last_fetch_time := last_run.get(last_fetch_time_field): + demisto.debug(f'debug-log: last run of type: {event_type.type} time is: {last_fetch_time}') + last_fetch_next = last_run.get(last_fetch_next_field, 0) + demisto.debug(f'debug-log: last run of type: {event_type.type} next is: {last_fetch_next}') + event_type_fetch_start_time = calculate_fetch_start_time(last_fetch_time, fetch_start_time) + response, next = client.fetch_by_aql_query( aql_query=event_type.aql_query, max_fetch=max_fetch, after=event_type_fetch_start_time, - order_by=event_type.order_by + order_by=event_type.order_by, + from_param=last_fetch_next ) demisto.debug(f'debug-log: fetched {len(response)} {event_type.type} from API') if response: new_events, next_run[last_fetch_ids] = dedup_events( response, last_run.get(last_fetch_ids, []), event_type.unique_id_key, event_type.order_by) - next_run[last_fetch_time] = new_events[-1].get(event_type.order_by) if new_events else last_run.get(last_fetch_time) - demisto.debug(f'debug-log: updated next_run with: {next_run[last_fetch_time]}') events.setdefault(event_type.dataset_name, []).extend(new_events) demisto.debug(f'debug-log: overall {len(new_events)} {event_type.dataset_name} (after dedup)') demisto.debug(f'debug-log: last {event_type.dataset_name} in list: {new_events[-1] if new_events else {}}') - else: - next_run.update(last_run) + + if not next: # we wish to update the time only in case 
the next is 0 because the next is relative to the time. + event_type_fetch_start_time = new_events[-1].get(event_type.order_by) if new_events else last_fetch_time + # can empty the list. + next_run[last_fetch_next_field] = next + if isinstance(event_type_fetch_start_time, datetime): + event_type_fetch_start_time = event_type_fetch_start_time.strftime(DATE_FORMAT) + next_run[last_fetch_time_field] = event_type_fetch_start_time + demisto.debug(f'debug-log: updated next_run for event type {event_type.type} with {next=} and {event_type_fetch_start_time=}') def fetch_events_for_specific_alert_ids(client: Client, alert, aql_alert_id): diff --git a/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector.yml b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector.yml index 35d8acd72c44..342ed495eb80 100644 --- a/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector.yml +++ b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector.yml @@ -99,7 +99,7 @@ script: script: '-' type: python subtype: python3 - dockerimage: demisto/python3:3.10.13.84405 + dockerimage: demisto/python3:3.10.14.92207 marketplaces: - marketplacev2 fromversion: 6.10.0 diff --git a/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_test.py b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_test.py index a2201bb68ef9..101428d893e5 100644 --- a/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_test.py +++ b/Packs/Armis/Integrations/ArmisEventCollector/ArmisEventCollector_test.py @@ -36,7 +36,7 @@ def test_initial_fetch_by_aql_query(self, mocker, dummy_client): 'time': '2023-01-01T01:00:10.123456+00:00' }]}} - second_response = {'data': {'next': None, 'results': [{ + second_response = {'data': {'next': 2, 'results': [{ 'unique_id': '2', 'time': '2023-01-01T01:00:20.123456+00:00' }]}} @@ -52,12 +52,13 @@ def test_initial_fetch_by_aql_query(self, mocker, dummy_client): expected_args = { 'url_suffix': '/search/', 'method': 
'GET', 'params': {'aql': 'example_query after:2023-01-01T00:59:00', 'includeTotal': - 'true', 'length': 2, 'orderBy': 'time', 'from': 1}, + 'true', 'length': 1, 'orderBy': 'time', 'from': 1}, 'headers': {'Authorization': 'test_access_token', 'Accept': 'application/json'} } mocked_http_request = mocker.patch.object(Client, '_http_request', side_effect=[first_response, second_response]) - assert dummy_client.fetch_by_aql_query('example_query', 3) == expected_result + assert dummy_client.fetch_by_aql_query('example_query', 2, (datetime.now() + - timedelta(minutes=1))) == (expected_result, 2) mocked_http_request.assert_called_with(**expected_args) @@ -105,7 +106,7 @@ def test_continues_fetch_by_aql_query(self, mocker, dummy_client): from_arg = arg_to_datetime('2023-01-01T01:00:01') mocked_http_request = mocker.patch.object(Client, '_http_request', side_effect=[first_response, second_response]) - assert dummy_client.fetch_by_aql_query('example_query', 3, from_arg) == expected_result + assert dummy_client.fetch_by_aql_query('example_query', 3, from_arg) == (expected_result, 0) mocked_http_request.assert_called_with(**expected_args) @@ -120,12 +121,13 @@ class TestHelperFunction: # test_calculate_fetch_start_time parametrize arguments case_last_run_exist = (date_1, datetime_2, datetime_1) case_from_date_parameter = (None, datetime_1, datetime_1) # type: ignore - case_first_fetch_no_from_date_parameter = (None, None, None) + case_first_fetch_no_from_date_parameter = (None, None, datetime(2023, 1, 1, 0, 59)) @pytest.mark.parametrize( "last_fetch_time, fetch_start_time_param, expected_result", [ case_last_run_exist, case_from_date_parameter, case_first_fetch_no_from_date_parameter] ) + @freeze_time("2023-01-01 01:00:00") def test_calculate_fetch_start_time(self, last_fetch_time, fetch_start_time_param, expected_result): """ Given: @@ -138,6 +140,7 @@ def test_calculate_fetch_start_time(self, last_fetch_time, fetch_start_time_para Then: - Case 1: Prefer last_fetch_time from 
last run and convert it to a valid datetime object. - Case 2: Use provided fetch_start_time_param (usually current time) datetime object. + - Case 3: Should return the now time (freezed as 2023-01-01) + 1 minute. """ from ArmisEventCollector import calculate_fetch_start_time assert calculate_fetch_start_time(last_fetch_time, fetch_start_time_param) == expected_result @@ -221,15 +224,27 @@ def test_dedup_events(self, events, events_last_fetch_ids, from ArmisEventCollector import dedup_events assert dedup_events(events, events_last_fetch_ids, unique_id_key, event_order_by) == expected_result - def test_fetch_by_event_type(self, mocker, dummy_client): + @pytest.mark.parametrize( + "next_pointer, expected_last_run", [ + (4, {'events_last_fetch_ids': ['3'], 'events_last_fetch_next_field': 4, + 'events_last_fetch_time': '2023-01-01T01:00:20'}), + (0, {'events_last_fetch_ids': ['3'], 'events_last_fetch_next_field': 0, + 'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00'})] + ) + @freeze_time("2024-01-01 01:00:00") + def test_fetch_by_event_type(self, mocker, dummy_client, next_pointer, expected_last_run): """ Given: - - A valid event type arguments for API request (unique_id_key, aql_query, type). + - A valid event type arguments for API request (unique_id_key, aql_query, type) and a mocker for the response data. + - Case 1: A response data with a next pointer = 0. + - Case 2: A response data with a next pointer = 4. When: - Iterating over which event types to fetch. Then: - Perform fetch for the specific event type, update event list and update last run dictionary for next fetch cycle. + - Case 1: Should set the next to 0 and take the freezed now time as the next run time. + - Case 2: Should set the next to 4 and take the time of the last incident. 
""" from ArmisEventCollector import fetch_by_event_type event_type = EVENT_TYPE('unique_id', 'example:query', 'events', 'time', 'events') @@ -251,12 +266,12 @@ def test_fetch_by_event_type(self, mocker, dummy_client): 'time': '2023-01-01T01:00:30.123456+00:00' } ] - mocker.patch.object(Client, 'fetch_by_aql_query', return_value=response) + mocker.patch.object(Client, 'fetch_by_aql_query', return_value=(response, next_pointer)) fetch_by_event_type(dummy_client, event_type, events, 1, last_run, next_run, fetch_start_time_param) assert events['events'] == [{'unique_id': '3', 'time': '2023-01-01T01:00:30.123456+00:00'}] - assert next_run == {'events_last_fetch_ids': ['3'], 'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00'} + assert next_run == expected_last_run # test_add_time_to_events parametrize arguments case_one_event = ( @@ -447,8 +462,9 @@ class TestFetchFlow: ['Events'], events_with_different_time_1, {'events': events_with_different_time_1}, - {'events_last_fetch_ids': ['3'], - 'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00', 'access_token': 'test_access_token'} + {'events_last_fetch_ids': ['3'], 'events_last_fetch_next_field': 4, + 'events_last_fetch_time': '2023-01-01T01:00:00', 'access_token': 'test_access_token'}, + 4 ) case_second_fetch = ( # type: ignore @@ -460,8 +476,9 @@ class TestFetchFlow: ['Events'], events_with_different_time_2, {'events': events_with_different_time_2}, - {'events_last_fetch_ids': ['7', '6'], - 'events_last_fetch_time': '2023-01-01T01:01:00.123456+00:00', 'access_token': 'test_access_token'} + {'events_last_fetch_ids': ['7', '6'], 'events_last_fetch_next_field': 8, + 'events_last_fetch_time': '2023-01-01T01:00:30', 'access_token': 'test_access_token'}, + 8 ) case_second_fetch_with_duplicates = ( # type: ignore 1000, @@ -479,8 +496,9 @@ class TestFetchFlow: 'unique_id': '7', 'time': '2023-01-01T01:01:00.123456+00:00' }]}, - {'events_last_fetch_ids': ['7', '6'], - 'events_last_fetch_time': 
'2023-01-01T01:01:00.123456+00:00', 'access_token': 'test_access_token'} + {'events_last_fetch_ids': ['7', '6'], 'events_last_fetch_next_field': 8, + 'events_last_fetch_time': '2023-01-01T01:00:30', 'access_token': 'test_access_token'}, + 8 ) case_no_new_event_from_fetch = ( # type: ignore @@ -492,8 +510,9 @@ class TestFetchFlow: ['Events'], {}, {}, - {'events_last_fetch_ids': ['1', '2', '3'], - 'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00', 'access_token': 'test_access_token'} + {'events_last_fetch_next_field': 4, + 'events_last_fetch_time': '2023-01-01T01:00:30', 'access_token': 'test_access_token'}, + 4 ) case_all_events_from_fetch_have_the_same_time = ( # type: ignore @@ -505,16 +524,17 @@ class TestFetchFlow: ['Events'], events_with_same_time, {'events': events_with_same_time}, - {'events_last_fetch_ids': ['1', '2', '3', '4', '5', '6'], - 'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00', 'access_token': 'test_access_token'} + {'events_last_fetch_ids': ['1', '2', '3', '4', '5', '6'], 'events_last_fetch_next_field': 7, + 'events_last_fetch_time': '2023-01-01T01:00:30', 'access_token': 'test_access_token'}, + 7 ) @ pytest.mark.parametrize('max_fetch, devices_max_fetch, last_run, fetch_start_time, event_types_to_fetch, response, events,\ - next_run', [case_first_fetch, case_second_fetch, case_second_fetch_with_duplicates, - case_no_new_event_from_fetch, case_all_events_from_fetch_have_the_same_time - ]) + next_run, next', [case_first_fetch, case_second_fetch, case_second_fetch_with_duplicates, + case_no_new_event_from_fetch, case_all_events_from_fetch_have_the_same_time + ]) def test_fetch_flow_cases(self, mocker, dummy_client, max_fetch, devices_max_fetch, last_run, - fetch_start_time, event_types_to_fetch, response, events, next_run): + fetch_start_time, event_types_to_fetch, response, events, next_run, next): """ Given: - Case 1: First fetch, response has 3 events with different timestamps. 
@@ -533,7 +553,7 @@ def test_fetch_flow_cases(self, mocker, dummy_client, max_fetch, devices_max_fet """ from ArmisEventCollector import fetch_events - mocker.patch.object(Client, 'fetch_by_aql_query', return_value=response) + mocker.patch.object(Client, 'fetch_by_aql_query', return_value=(response, next)) mocker.patch.dict(EVENT_TYPES, {'Events': EVENT_TYPE('unique_id', 'events_query', 'events', 'time', 'events')}) assert fetch_events(dummy_client, max_fetch, devices_max_fetch, last_run, fetch_start_time, event_types_to_fetch, None) == (events, next_run) @@ -567,8 +587,10 @@ def test_token_expires_in_runtime(self, mocker, dummy_client): 'unique_id': '3', 'time': '2023-01-01T01:00:30.123456+00:00' } - ] - }} + ], + 'next': 4 + } + } fetch_start_time = arg_to_datetime('2023-01-01T01:00:00') mocker.patch.object(Client, '_http_request', side_effect=[DemistoException( message='Invalid access token'), events_with_different_time]) @@ -576,7 +598,8 @@ def test_token_expires_in_runtime(self, mocker, dummy_client): mocker.patch.object(Client, 'update_access_token') if fetch_start_time: last_run = {'events_last_fetch_ids': ['3'], - 'events_last_fetch_time': '2023-01-01T01:00:30.123456+00:00', + 'events_last_fetch_next_field': 4, + 'events_last_fetch_time': '2023-01-01T01:00:00', 'access_token': 'test_access_token'} assert fetch_events(dummy_client, 1000, 1000, {}, fetch_start_time, [ 'Events'], None) == ({'events': events_with_different_time['data']['results']}, last_run) @@ -600,7 +623,8 @@ def test_fetch_alert_flow(self, mocker, dummy_client): 'activityUUIDs': ['123', '456'], 'deviceIds': ['789', '012'], 'time': '2023-01-01T01:00:10.123456+00:00' - }] + }], + 'next': 2 }} activities_response = { 'data': @@ -626,7 +650,8 @@ def test_fetch_alert_flow(self, mocker, dummy_client): expected_result['devicesData'] = devices_response['data']['results'] if fetch_start_time: last_run = {'alerts_last_fetch_ids': [''], - 'alerts_last_fetch_time': '2023-01-01T01:00:10.123456+00:00', + 
'alerts_last_fetch_next_field': 2, + 'alerts_last_fetch_time': '2023-01-01T01:00:00', 'access_token': 'test_access_token'} - assert fetch_events(dummy_client, 1000, 1000, {}, fetch_start_time, [ + assert fetch_events(dummy_client, 1, 1, {}, fetch_start_time, [ 'Alerts'], None) == ({'alerts': [expected_result]}, last_run) diff --git a/Packs/Armis/Integrations/ArmisEventCollector/README.md b/Packs/Armis/Integrations/ArmisEventCollector/README.md index 3c5918015dc7..58bb64f8a7c0 100644 --- a/Packs/Armis/Integrations/ArmisEventCollector/README.md +++ b/Packs/Armis/Integrations/ArmisEventCollector/README.md @@ -1,6 +1,10 @@ Collects alerts, devices and activities from Armis resources. This integration was integrated and tested with API V.1.8 of Armis API. +<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure Armis Event Collector on Cortex XSOAR 1. Navigate to **Settings** > **Integrations** > **Servers & Services**. diff --git a/Packs/Armis/ReleaseNotes/1_1_13.md b/Packs/Armis/ReleaseNotes/1_1_13.md new file mode 100644 index 000000000000..f25b315cea95 --- /dev/null +++ b/Packs/Armis/ReleaseNotes/1_1_13.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Armis Event Collector + +- Improved implementation of the event collection mechanism. +- Updated the Docker image to: *demisto/python3:3.10.14.92207*. 
\ No newline at end of file diff --git a/Packs/Armis/pack_metadata.json b/Packs/Armis/pack_metadata.json index b56acdf69833..81f490699cdf 100755 --- a/Packs/Armis/pack_metadata.json +++ b/Packs/Armis/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Armis", "description": "Agentless and passive security platform that sees, identifies, and classifies every device, tracks behavior, identifies threats, and takes action automatically to protect critical information and systems", "support": "partner", - "currentVersion": "1.1.12", + "currentVersion": "1.1.13", "author": "Armis Corporation", "url": "https://support.armis.com/", "email": "support@armis.com", @@ -17,5 +17,6 @@ "marketplaces": [ "xsoar", "marketplacev2" - ] + ], + "defaultDataSource": "ArmisEventCollector" } \ No newline at end of file diff --git a/Packs/Asset/ReleaseNotes/1_0_8.md b/Packs/Asset/ReleaseNotes/1_0_8.md new file mode 100644 index 000000000000..203fd737cf46 --- /dev/null +++ b/Packs/Asset/ReleaseNotes/1_0_8.md @@ -0,0 +1,3 @@ +## Asset + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. diff --git a/Packs/Asset/pack_metadata.json b/Packs/Asset/pack_metadata.json index 31b4b3d40efe..4f5d4a24bd66 100644 --- a/Packs/Asset/pack_metadata.json +++ b/Packs/Asset/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Asset", "description": "Base pack for any packs using asset fields.", "support": "xsoar", - "currentVersion": "1.0.7", + "currentVersion": "1.0.8", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AutoFocus/ReleaseNotes/2_2_1.md b/Packs/AutoFocus/ReleaseNotes/2_2_1.md new file mode 100644 index 000000000000..fee86f06e103 --- /dev/null +++ b/Packs/AutoFocus/ReleaseNotes/2_2_1.md @@ -0,0 +1,3 @@ +## AutoFocus by Palo Alto Networks + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. 
diff --git a/Packs/AutoFocus/pack_metadata.json b/Packs/AutoFocus/pack_metadata.json index 92f78635cc85..54323b701ab1 100644 --- a/Packs/AutoFocus/pack_metadata.json +++ b/Packs/AutoFocus/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AutoFocus by Palo Alto Networks", "description": "Use the Palo Alto Networks AutoFocus integration to distinguish the most\n important threats from everyday commodity attacks.", "support": "xsoar", - "currentVersion": "2.2.0", + "currentVersion": "2.2.1", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager_test.py b/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager_test.py index fdd7cd7248eb..1d785c8812d9 100644 --- a/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager_test.py +++ b/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/AwsSecretsManager_test.py @@ -48,8 +48,8 @@ def test_aws_secrets_manager_secret_list_command(mocker): aws_client = create_client() mocker.patch.object(AWSClient, "aws_session", return_value=Boto3Client()) mocker.patch.object(Boto3Client, 'list_secrets', - return_value={'SecretList': [{'ARN': 'arn:aws:secretsmanager:eu-central-1:654338056632:secret:' - 'test_for_moishy-fVYXb6', 'Name': 'test_for_moishy', + return_value={'SecretList': [{'ARN': 'arn:aws:secretsmanager:eu-central-1:123456789012:secret:' + 'test_account', 'Name': 'test_for_moishy', 'Description': 'new description', 'LastChangedDate': None, 'Tags': [], 'SecretVersionsToStages': {'01cba660-28be-45d7-8597-d1ab295b0f35': ['AWSCURRENT'], diff --git a/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/README.md b/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/README.md index 36b13ef46197..2fe0ae91fa08 100644 --- a/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/README.md +++ b/Packs/Aws-SecretsManager/Integrations/AwsSecretsManager/README.md @@ -95,7 +95,7 @@ 
Retrieve all secrets. }, "SecretList": [ { - "ARN": "arn:aws:secretsmanager:eu-central-1:654338056632:secret:fdff-vnNyyc", + "ARN": "arn:aws:secretsmanager:eu-central-1:123456789012:secret:fdff-vnNyyc", "CreatedDate": "2022-09-04T09:10:12", "LastAccessedDate": "2022-10-23T00:00:00", "LastChangedDate": "2022-10-23T13:40:55", @@ -108,7 +108,7 @@ Retrieve all secrets. "Tags": [] }, { - "ARN": "arn:aws:secretsmanager:eu-central-1:654338056632:secret:gmail-oF08mg", + "ARN": "arn:aws:secretsmanager:eu-central-1:123456789012:secret:gmail-oF08mg", "CreatedDate": "2022-08-31T09:47:24", "LastAccessedDate": "2022-10-23T00:00:00", "LastChangedDate": "2022-08-31T09:47:24", @@ -121,7 +121,7 @@ Retrieve all secrets. "Tags": [] }, { - "ARN": "arn:aws:secretsmanager:eu-central-1:654338056632:secret:DB_credentials-3ic9K7", + "ARN": "arn:aws:secretsmanager:eu-central-1:123456789012:secret:DB_credentials-3ic9K7", "CreatedDate": "2022-08-31T09:45:33", "LastAccessedDate": "2022-10-23T00:00:00", "LastChangedDate": "2022-08-31T09:45:33", @@ -134,7 +134,7 @@ Retrieve all secrets. "Tags": [] }, { - "ARN": "arn:aws:secretsmanager:eu-central-1:654338056632:secret:test_for_moishy-fVYXb6", + "ARN": "arn:aws:secretsmanager:eu-central-1:123456789012:secret:test_account", "CreatedDate": "2022-08-21T13:54:05", "Description": "new description", "LastAccessedDate": "2022-10-23T00:00:00", @@ -162,10 +162,10 @@ Retrieve all secrets. 
>### AWS Secrets List >|ARN|Description|LastAccessedDate|Name| >|---|---|---|---| ->| arn:aws:secretsmanager:eu-central-1:654338056632:secret:fdff-vnNyyc | | 2022-10-23T13:40:55 | fdff | ->| arn:aws:secretsmanager:eu-central-1:654338056632:secret:gmail-oF08mg | | 2022-08-31T09:47:24 | gmail | ->| arn:aws:secretsmanager:eu-central-1:654338056632:secret:DB_credentials-3ic9K7 | | 2022-08-31T09:45:33 | DB_credentials | ->| arn:aws:secretsmanager:eu-central-1:654338056632:secret:test_for_moishy-fVYXb6 | new description | 2022-09-08T07:14:13 | test_for_moishy | +>| arn:aws:secretsmanager:eu-central-1:123456789012:secret:fdff-vnNyyc | | 2022-10-23T13:40:55 | fdff | +>| arn:aws:secretsmanager:eu-central-1:123456789012:secret:gmail-oF08mg | | 2022-08-31T09:47:24 | gmail | +>| arn:aws:secretsmanager:eu-central-1:123456789012:secret:DB_credentials-3ic9K7 | | 2022-08-31T09:45:33 | DB_credentials | +>| arn:aws:secretsmanager:eu-central-1:123456789012:secret:test_account | new description | 2022-09-08T07:14:13 | test_for_moishy | ### aws-secrets-manager-secret–value-get @@ -215,7 +215,7 @@ Retrieve a secret value by key. "SecretsManager": { "Secret": { "SecretValue": { - "ARN": "arn:aws:secretsmanager:eu-central-1:654338056632:secret:fdff-vnNyyc", + "ARN": "arn:aws:secretsmanager:eu-central-1:123456789012:secret:fdff-vnNyyc", "CreatedDate": "2022-09-04T09:10:13", "Name": "fdff", "ResponseMetadata": { @@ -246,7 +246,7 @@ Retrieve a secret value by key. >### AWS Get Secret >|ARN|CreatedDate|Name|SecretBinary|SecretString| >|---|---|---|---|---| ->| arn:aws:secretsmanager:eu-central-1:654338056632:secret:fdff-vnNyyc | 2022-09-04T09:10:13 | fdff | | {"password":"cvcvcv","username":"cvcvcv"} | +>| arn:aws:secretsmanager:eu-central-1:123456789012:secret:fdff-vnNyyc | 2022-09-04T09:10:13 | fdff | | {"password":"cvcvcv","username":"cvcvcv"} | ### aws-secrets-manager-secret–delete @@ -345,7 +345,7 @@ Get the Secret Manager policy for a specific secret. 
"AWS": { "SecretsManager": { "Policy": { - "ARN": "arn:aws:secretsmanager:eu-central-1:654338056632:secret:fdff-vnNyyc", + "ARN": "arn:aws:secretsmanager:eu-central-1:123456789012:secret:fdff-vnNyyc", "Name": "fdff", "ResponseMetadata": { "HTTPHeaders": { @@ -369,5 +369,5 @@ Get the Secret Manager policy for a specific secret. >### AWS Secret Policy >|ARN|Name|Policy| >|---|---|---| ->| arn:aws:secretsmanager:eu-central-1:654338056632:secret:fdff-vnNyyc | fdff | | +>| arn:aws:secretsmanager:eu-central-1:123456789012:secret:fdff-vnNyyc | fdff | | diff --git a/Packs/Axonius/Integrations/Axonius/Axonius.yml b/Packs/Axonius/Integrations/Axonius/Axonius.yml index 76ed859c68e5..d1fd4d63f47f 100644 --- a/Packs/Axonius/Integrations/Axonius/Axonius.yml +++ b/Packs/Axonius/Integrations/Axonius/Axonius.yml @@ -31,6 +31,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" description: Gather device info by saved query. name: axonius-get-devices-by-savedquery outputs: @@ -70,6 +71,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" description: Gather user info by saved query. name: axonius-get-users-by-savedquery outputs: @@ -103,6 +105,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -139,6 +142,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -175,6 +179,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. 
isArray: true name: fields @@ -211,6 +216,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -247,6 +253,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -289,6 +296,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -331,6 +339,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -373,6 +382,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -415,6 +425,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -457,6 +468,7 @@ script: required: true - description: The maximum number of results to return. name: max_results + defaultValue: "50" - description: Comma separated list of Axonius fields to return. isArray: true name: fields @@ -568,7 +580,7 @@ script: - contextPath: Axonius.tags description: Axonius Tags. 
type: Unknown - dockerimage: demisto/axonius:1.1.0.79132 + dockerimage: demisto/axonius:1.1.0.94077 runonce: false script: '-' subtype: python3 diff --git a/Packs/Axonius/Integrations/Axonius/README.md b/Packs/Axonius/Integrations/Axonius/README.md index 2ee1d55c9dd9..304e3605a8e7 100644 --- a/Packs/Axonius/Integrations/Axonius/README.md +++ b/Packs/Axonius/Integrations/Axonius/README.md @@ -32,7 +32,7 @@ Gather device info by saved query | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | saved_query_name | The name of the devices saved query within Axonius. See https://docs.axonius.com/docs/saved-queries-devices. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | #### Context Output @@ -120,7 +120,7 @@ Gather user info by saved query | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | saved_query_name | The name of the users saved query within Axonius. See https://docs.axonius.com/docs/saved-queries-users. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | #### Context Output @@ -179,7 +179,7 @@ Gather user info by email address | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The user email address to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -240,7 +240,7 @@ Gather user info by email address using regex | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The user email address to search for within Axonius. | Required | -| max_results | The maximum number of results to return. 
| Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -303,7 +303,7 @@ Gather user info by username | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The username to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -360,7 +360,7 @@ Gather user info by username using regex | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The username to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -416,7 +416,7 @@ Gather device info by hostname | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The hostname to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -498,7 +498,7 @@ Gather device info by hostname using regex | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The hostname to search for within Axonius using regex. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. 
| Optional | @@ -582,7 +582,7 @@ Gather device info by IP address | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The IP address to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -665,7 +665,7 @@ Gather device info by IP address using regex | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The IP address to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -748,7 +748,7 @@ Gather device info by MAC address | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The MAC address to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. | Optional | @@ -831,7 +831,7 @@ Gather device info by MAC address using regex | **Argument Name** | **Description** | **Required** | | --- | --- | --- | | value | The MAC address to search for within Axonius. | Required | -| max_results | The maximum number of results to return. | Optional | +| max_results | The maximum number of results to return. Default is 50. | Optional | | fields | Comma separated list of Axonius fields to return. 
| Optional | diff --git a/Packs/Axonius/ReleaseNotes/1_2_2.md b/Packs/Axonius/ReleaseNotes/1_2_2.md new file mode 100644 index 000000000000..fd3cb5ff8ae7 --- /dev/null +++ b/Packs/Axonius/ReleaseNotes/1_2_2.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Axonius + +- Added default value to the max_results argument. +- Updated the Docker image to: *demisto/axonius:1.1.0.94077*. diff --git a/Packs/Axonius/pack_metadata.json b/Packs/Axonius/pack_metadata.json index bb9616a220bf..9f65c0309d22 100644 --- a/Packs/Axonius/pack_metadata.json +++ b/Packs/Axonius/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Axonius", "description": "Enrichment for devices and users in your environment.", "support": "partner", - "currentVersion": "1.2.1", + "currentVersion": "1.2.2", "author": "Axonius", "url": "https://docs.axonius.com", "email": "support@axonius.com", diff --git a/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_16.md b/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_16.md new file mode 100644 index 000000000000..124d4dca3a6c --- /dev/null +++ b/Packs/Azure-Enrichment-Remediation/ReleaseNotes/1_1_16.md @@ -0,0 +1,3 @@ +## Azure Enrichment and Remediation + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. 
diff --git a/Packs/Azure-Enrichment-Remediation/pack_metadata.json b/Packs/Azure-Enrichment-Remediation/pack_metadata.json index 344c5d7749ae..9a1f6ccf89b4 100644 --- a/Packs/Azure-Enrichment-Remediation/pack_metadata.json +++ b/Packs/Azure-Enrichment-Remediation/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Enrichment and Remediation", "description": "Playbooks using multiple Azure content packs for enrichment and remediation purposes", "support": "xsoar", - "currentVersion": "1.1.15", + "currentVersion": "1.1.16", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureActiveDirectory/ReleaseNotes/1_3_22.md b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_22.md new file mode 100644 index 000000000000..98d7317c5a9d --- /dev/null +++ b/Packs/AzureActiveDirectory/ReleaseNotes/1_3_22.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Active Directory Identity Protection (Deprecated) + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. \ No newline at end of file diff --git a/Packs/AzureActiveDirectory/pack_metadata.json b/Packs/AzureActiveDirectory/pack_metadata.json index 3fe1f25a76f7..07f498b371dd 100644 --- a/Packs/AzureActiveDirectory/pack_metadata.json +++ b/Packs/AzureActiveDirectory/pack_metadata.json @@ -3,7 +3,7 @@ "description": "Deprecated. Use Microsoft Graph Identity and Access instead.", "support": "xsoar", "hidden": true, - "currentVersion": "1.3.21", + "currentVersion": "1.3.22", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureCompute/ReleaseNotes/1_2_23.md b/Packs/AzureCompute/ReleaseNotes/1_2_23.md new file mode 100644 index 000000000000..2ed829c82a52 --- /dev/null +++ b/Packs/AzureCompute/ReleaseNotes/1_2_23.md @@ -0,0 +1,3 @@ +## Azure Compute + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. 
diff --git a/Packs/AzureCompute/ReleaseNotes/1_2_24.md b/Packs/AzureCompute/ReleaseNotes/1_2_24.md new file mode 100644 index 000000000000..8da1ce00c620 --- /dev/null +++ b/Packs/AzureCompute/ReleaseNotes/1_2_24.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Compute v2 + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureCompute/pack_metadata.json b/Packs/AzureCompute/pack_metadata.json index ef09b90be3c6..8b9f178b4d1a 100644 --- a/Packs/AzureCompute/pack_metadata.json +++ b/Packs/AzureCompute/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Compute", "description": "Create and Manage Azure Virtual Machines", "support": "xsoar", - "currentVersion": "1.2.22", + "currentVersion": "1.2.24", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.py b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.py index e005f9ebbdbc..dde407d2643d 100644 --- a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.py +++ b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.py @@ -41,7 +41,7 @@ def __init__(self, cluster_url: str, client_id: str, client_activity_prefix: str self.cluster_url = cluster_url self.host = cluster_url.split("https://")[1] self.scope = f'{cluster_url}/user_impersonation offline_access user.read' if 'Authorization' not in connection_type \ - else 'https://management.azure.com/.default' + else f'{cluster_url}/.default' self.client_activity_prefix = client_activity_prefix client_args = assign_params( self_deployed=True, diff --git a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.yml b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.yml index c06dc98a3eee..2d59396da63b 100644 --- a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.yml +++ 
b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer.yml @@ -434,7 +434,7 @@ script: - description: Generate the login url used for Authorization code flow. name: azure-data-explorer-generate-login-url arguments: [] - dockerimage: demisto/auth-utils:1.0.0.87472 + dockerimage: demisto/auth-utils:1.0.0.94075 runonce: false script: "-" subtype: python3 diff --git a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py index fd903e48f7f8..1edbbb9dffc7 100644 --- a/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py +++ b/Packs/AzureDataExplorer/Integrations/AzureDataExplorer/AzureDataExplorer_test.py @@ -21,7 +21,7 @@ def load_mock_response(file_name: str) -> str: Returns: str: Mock file content. """ - with open(f'test_data/{file_name}', mode='r', encoding='utf-8') as mock_file: + with open(f'test_data/{file_name}', encoding='utf-8') as mock_file: return mock_file.read() @@ -301,9 +301,10 @@ def test_generate_login_url(mocker): redirect_uri = 'redirect_uri' tenant_id = 'tenant_id' client_id = 'client_id' + cluster_url = 'https://help.kusto.windows.net' mocked_params = { 'redirect_uri': redirect_uri, - 'cluster_url': 'https://help.kusto.windows.net', + 'cluster_url': cluster_url, 'self_deployed': 'True', 'tenant_id': tenant_id, 'client_id': client_id, @@ -320,7 +321,7 @@ def test_generate_login_url(mocker): # assert expected_url = f'[login URL](https://login.microsoftonline.com/{tenant_id}/oauth2/v2.0/authorize?' 
\ 'response_type=code' \ - '&scope=offline_access%20https://management.azure.com/.default' \ + f'&scope=offline_access%20{cluster_url}/.default' \ f'&client_id={client_id}&redirect_uri={redirect_uri}&prompt=consent)' res = AzureDataExplorer.return_results.call_args[0][0].readable_output assert expected_url in res diff --git a/Packs/AzureDataExplorer/ReleaseNotes/1_2_40.md b/Packs/AzureDataExplorer/ReleaseNotes/1_2_40.md new file mode 100644 index 000000000000..4c18c84c81ac --- /dev/null +++ b/Packs/AzureDataExplorer/ReleaseNotes/1_2_40.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Data Explorer + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureDataExplorer/ReleaseNotes/1_2_41.md b/Packs/AzureDataExplorer/ReleaseNotes/1_2_41.md new file mode 100644 index 000000000000..d60c2512df4e --- /dev/null +++ b/Packs/AzureDataExplorer/ReleaseNotes/1_2_41.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Azure Data Explorer + +- Fixed an issue where the scope was incorrect for the Authorization Code Flow. +- Updated the Docker image to: *demisto/auth-utils:1.0.0.94075*. 
diff --git a/Packs/AzureDataExplorer/pack_metadata.json b/Packs/AzureDataExplorer/pack_metadata.json index ae5ff4e89ce2..3885997431df 100644 --- a/Packs/AzureDataExplorer/pack_metadata.json +++ b/Packs/AzureDataExplorer/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Data Explorer", "description": "Use Azure Data Explorer integration to collect and analyze data inside clusters of Azure Data Explorer and manage search queries.", "support": "xsoar", - "currentVersion": "1.2.39", + "currentVersion": "1.2.41", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureDevOps/ReleaseNotes/1_3_18.md b/Packs/AzureDevOps/ReleaseNotes/1_3_18.md new file mode 100644 index 000000000000..cd0a0556cf1d --- /dev/null +++ b/Packs/AzureDevOps/ReleaseNotes/1_3_18.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### AzureDevOps + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureDevOps/pack_metadata.json b/Packs/AzureDevOps/pack_metadata.json index 7058a634b4cf..38fc5bbdd404 100644 --- a/Packs/AzureDevOps/pack_metadata.json +++ b/Packs/AzureDevOps/pack_metadata.json @@ -2,7 +2,7 @@ "name": "AzureDevOps", "description": "Create and manage Git repositories in Azure DevOps Services.", "support": "xsoar", - "currentVersion": "1.3.17", + "currentVersion": "1.3.18", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureFirewall/ReleaseNotes/1_1_39.md b/Packs/AzureFirewall/ReleaseNotes/1_1_39.md new file mode 100644 index 000000000000..c374ab648130 --- /dev/null +++ b/Packs/AzureFirewall/ReleaseNotes/1_1_39.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Firewall + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureFirewall/pack_metadata.json b/Packs/AzureFirewall/pack_metadata.json index 6dbe65062fd3..b7126cd66d51 100644 --- a/Packs/AzureFirewall/pack_metadata.json +++ b/Packs/AzureFirewall/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Firewall", "description": "Azure Firewall is a cloud-native and intelligent network firewall security service that provides breed threat protection for cloud workloads running in Azure.It's a fully stateful, firewall as a service with built-in high availability and unrestricted cloud scalability.", "support": "xsoar", - "currentVersion": "1.1.38", + "currentVersion": "1.1.39", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureKeyVault/ReleaseNotes/1_1_43.md b/Packs/AzureKeyVault/ReleaseNotes/1_1_43.md new file mode 100644 index 000000000000..00c91410dfd3 --- /dev/null +++ b/Packs/AzureKeyVault/ReleaseNotes/1_1_43.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Key Vault + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureKeyVault/pack_metadata.json b/Packs/AzureKeyVault/pack_metadata.json index db79b78a0f95..435268020553 100644 --- a/Packs/AzureKeyVault/pack_metadata.json +++ b/Packs/AzureKeyVault/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Key Vault", "description": "Use Key Vault to safeguard and manage cryptographic keys and secrets used by cloud applications and services.", "support": "xsoar", - "currentVersion": "1.1.42", + "currentVersion": "1.1.43", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureKubernetesServices/ReleaseNotes/1_1_24.md b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_24.md new file mode 100644 index 000000000000..f8f130690da1 --- /dev/null +++ b/Packs/AzureKubernetesServices/ReleaseNotes/1_1_24.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Kubernetes Services + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureKubernetesServices/pack_metadata.json b/Packs/AzureKubernetesServices/pack_metadata.json index 962aa69af538..74d03e31029b 100644 --- a/Packs/AzureKubernetesServices/pack_metadata.json +++ b/Packs/AzureKubernetesServices/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Kubernetes Services", "description": "Deploy and manage containerized applications with a fully managed Kubernetes service.", "support": "xsoar", - "currentVersion": "1.1.23", + "currentVersion": "1.1.24", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureLogAnalytics/ReleaseNotes/1_1_30.md b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_30.md new file mode 100644 index 000000000000..938138b43d88 --- /dev/null +++ b/Packs/AzureLogAnalytics/ReleaseNotes/1_1_30.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Log Analytics + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureLogAnalytics/pack_metadata.json b/Packs/AzureLogAnalytics/pack_metadata.json index 2c4e1d643146..37b16939a711 100644 --- a/Packs/AzureLogAnalytics/pack_metadata.json +++ b/Packs/AzureLogAnalytics/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Log Analytics", "description": "Log Analytics is a service that helps you collect and analyze data generated by resources in your cloud and on-premises environments.", "support": "xsoar", - "currentVersion": "1.1.29", + "currentVersion": "1.1.30", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_27.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_27.md new file mode 100644 index 000000000000..1c49af22c1cc --- /dev/null +++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_27.md @@ -0,0 +1,3 @@ +## Azure Network Security Groups + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. diff --git a/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_28.md b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_28.md new file mode 100644 index 000000000000..ee504711d387 --- /dev/null +++ b/Packs/AzureNetworkSecurityGroups/ReleaseNotes/1_2_28.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Network Security Groups + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureNetworkSecurityGroups/pack_metadata.json b/Packs/AzureNetworkSecurityGroups/pack_metadata.json index 5944a9175500..b682f9b02fe3 100644 --- a/Packs/AzureNetworkSecurityGroups/pack_metadata.json +++ b/Packs/AzureNetworkSecurityGroups/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Network Security Groups", "description": "Azure Network Security Groups are used to filter network traffic to and from Azure resources in an Azure virtual network", "support": "xsoar", - "currentVersion": "1.2.26", + "currentVersion": "1.2.28", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureRiskyUsers/ReleaseNotes/1_1_34.md b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_34.md new file mode 100644 index 000000000000..20c26a004c1c --- /dev/null +++ b/Packs/AzureRiskyUsers/ReleaseNotes/1_1_34.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Risky Users + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureRiskyUsers/pack_metadata.json b/Packs/AzureRiskyUsers/pack_metadata.json index d5ac31469594..fa036e589bd5 100644 --- a/Packs/AzureRiskyUsers/pack_metadata.json +++ b/Packs/AzureRiskyUsers/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Risky Users", "description": "Azure Risky Users provides access to all at-risk users and risk detections in Azure AD environment.", "support": "xsoar", - "currentVersion": "1.1.33", + "currentVersion": "1.1.34", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureSQLManagement/ReleaseNotes/1_1_44.md b/Packs/AzureSQLManagement/ReleaseNotes/1_1_44.md new file mode 100644 index 000000000000..bc83605c0a5a --- /dev/null +++ b/Packs/AzureSQLManagement/ReleaseNotes/1_1_44.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure SQL Management + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureSQLManagement/pack_metadata.json b/Packs/AzureSQLManagement/pack_metadata.json index 0aa9e69fe657..362084c66d8e 100644 --- a/Packs/AzureSQLManagement/pack_metadata.json +++ b/Packs/AzureSQLManagement/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure SQL Management", "description": "Microsoft Azure SQL Database is a managed cloud database provided as part of Microsoft Azure", "support": "xsoar", - "currentVersion": "1.1.43", + "currentVersion": "1.1.44", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureSecurityCenter/ReleaseNotes/2_0_24.md b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_24.md new file mode 100644 index 000000000000..ce681281d1ee --- /dev/null +++ b/Packs/AzureSecurityCenter/ReleaseNotes/2_0_24.md @@ -0,0 +1,10 @@ + +#### Integrations + +##### Microsoft Defender for Cloud Event Collector + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. + +##### Microsoft Defender for Cloud + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureSecurityCenter/pack_metadata.json b/Packs/AzureSecurityCenter/pack_metadata.json index 86c83906d677..fc52e50fe290 100644 --- a/Packs/AzureSecurityCenter/pack_metadata.json +++ b/Packs/AzureSecurityCenter/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Microsoft Defender for Cloud", "description": "Unified security management and advanced threat protection across hybrid cloud workloads.", "support": "xsoar", - "currentVersion": "2.0.23", + "currentVersion": "2.0.24", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureSentinel/IncidentFields/incidentfield-Microsoft_Sentinel_Incident_Number.json b/Packs/AzureSentinel/IncidentFields/incidentfield-Microsoft_Sentinel_Incident_Number.json index 81ba7f162860..140200737da0 100644 --- a/Packs/AzureSentinel/IncidentFields/incidentfield-Microsoft_Sentinel_Incident_Number.json +++ b/Packs/AzureSentinel/IncidentFields/incidentfield-Microsoft_Sentinel_Incident_Number.json @@ -20,7 +20,7 @@ "openEnded": false, "associatedToAll": true, "unmapped": false, - "unsearchable": true, + "unsearchable": false, "caseInsensitive": true, "sla": 0, "threshold": 72, diff --git a/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.py b/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.py index 8adbe2aec6c3..5728900ef3b9 100644 --- a/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.py +++ b/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.py @@ -74,7 +74,8 @@ OUTGOING_MIRRORED_FIELDS = {filed: pascalToSpace(filed) for filed in OUTGOING_MIRRORED_FIELDS} LEVEL_TO_SEVERITY = {0: 'Informational', 0.5: 'Informational', 1: 'Low', 2: 'Medium', 3: 'High', 4: 'High'} -CLASSIFICATION_REASON = {'FalsePositive': 'InaccurateData', 'TruePositive': 'SuspiciousActivity'} +CLASSIFICATION_REASON = {'FalsePositive': 'InaccurateData', 'TruePositive': 'SuspiciousActivity', + 'BenignPositive': 'SuspiciousButExpected'} class AzureSentinelClient: 
@@ -687,17 +688,19 @@ def update_incident_request(client: AzureSentinelClient, incident_id: str, data: if any(field not in data for field in required_fields): raise DemistoException(f'Update incident request is missing one of the required fields for the ' f'API: {required_fields}') - properties = { 'title': data.get('title'), 'description': delta.get('description'), 'severity': LEVEL_TO_SEVERITY[data.get('severity', '')], 'status': 'Active', - 'labels': [{'labelName': label, 'type': 'User'} for label in delta.get('tags', [])], 'firstActivityTimeUtc': delta.get('firstActivityTimeUtc'), 'lastActivityTimeUtc': delta.get('lastActivityTimeUtc'), - 'owner': demisto.get(fetched_incident_data, 'properties.owner', {}) + 'owner': demisto.get(fetched_incident_data, 'properties.owner', {}), + 'labels': demisto.get(fetched_incident_data, 'properties.labels', []) } + + properties['labels'] += [{'labelName': label, 'type': 'User'} for label in delta.get('tags', [])] + if close_ticket: properties |= { 'status': 'Closed', @@ -711,7 +714,8 @@ def update_incident_request(client: AzureSentinelClient, incident_id: str, data: 'properties': properties } demisto.debug(f'Updating incident with remote ID {incident_id} with data: {data}') - return client.http_request('PUT', f'incidents/{incident_id}', data=data) + response = client.http_request('PUT', f'incidents/{incident_id}', data=data) + return response def update_remote_incident(client: AzureSentinelClient, data: Dict[str, Any], delta: Dict[str, Any], diff --git a/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.yml b/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.yml index 41464d3e1ca2..0b82e6d41890 100644 --- a/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.yml +++ b/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel.yml @@ -2381,7 +2381,7 @@ script: execution: false name: azure-sentinel-auth-reset arguments: [] - dockerimage: demisto/crypto:1.0.0.87358 + dockerimage: 
demisto/crypto:1.0.0.94037 isfetch: true runonce: false script: '-' diff --git a/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel_test.py b/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel_test.py index da1eec9538e4..30d5602de9ad 100644 --- a/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel_test.py +++ b/Packs/AzureSentinel/Integrations/AzureSentinel/AzureSentinel_test.py @@ -1730,6 +1730,22 @@ def test_close_incident_in_remote(mocker, delta, data, close_ticket_param, to_cl {'title': 'Title', 'severity': 'Low', 'status': 'Active', 'classification': 'Undetermined'}, {'title': 'Title', 'severity': 'Low', 'status': 'Closed', 'classification': 'Undetermined'}, True + ), + ( # Update labels of active incident when no labels exist. + {'title': 'Title', 'description': 'desc', 'severity': 2, 'status': 1, 'tags': []}, + {'title': 'Title', 'tags': ['Test']}, + {'title': 'Title', 'description': 'desc', 'severity': 'Medium', 'status': 'Active'}, + {'title': 'Title', 'severity': 'Medium', 'status': 'Active', 'labels': [{'labelName': 'Test', 'type': 'User'}]}, + False + ), + ( # Update labels of active incident when a label already exist. 
+ {'title': 'Title', 'description': 'desc', 'severity': 2, 'status': 1, 'tags': ['Test']}, + {'title': 'Title', 'tags': ['Test2']}, + {'title': 'Title', 'description': 'desc', 'severity': 'Medium', 'status': 'Active', + 'properties': {'labels': [{'labelName': 'Test', 'type': 'User'}]}}, + {'title': 'Title', 'severity': 'Medium', 'status': 'Active', + 'labels': [{'labelName': 'Test', 'type': 'User'}, {'labelName': 'Test2', 'type': 'User'}]}, + False ) ]) def test_update_incident_request(mocker, data, delta, mocked_fetch_data, expected_response, close_ticket): diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_40.md b/Packs/AzureSentinel/ReleaseNotes/1_5_40.md new file mode 100644 index 000000000000..b453a6a40fc3 --- /dev/null +++ b/Packs/AzureSentinel/ReleaseNotes/1_5_40.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Microsoft Sentinel + +- Fixed an issue where mirroring labels did not work properly. diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_41.md b/Packs/AzureSentinel/ReleaseNotes/1_5_41.md new file mode 100644 index 000000000000..1dbb13194748 --- /dev/null +++ b/Packs/AzureSentinel/ReleaseNotes/1_5_41.md @@ -0,0 +1,6 @@ + +#### Incident Fields + +##### Microsoft Sentinel Incident Number + +- Updated the field to be searchable. diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_42.md b/Packs/AzureSentinel/ReleaseNotes/1_5_42.md new file mode 100644 index 000000000000..aa969ff133a2 --- /dev/null +++ b/Packs/AzureSentinel/ReleaseNotes/1_5_42.md @@ -0,0 +1,7 @@ + +#### Integrations + +##### Microsoft Sentinel + +- Fixed an issue where mirroring fails during incident closing when classification is 'BenignPositive'. +- Updated the Docker image to: *demisto/crypto:1.0.0.94037*. 
\ No newline at end of file diff --git a/Packs/AzureSentinel/ReleaseNotes/1_5_43.md b/Packs/AzureSentinel/ReleaseNotes/1_5_43.md new file mode 100644 index 000000000000..2ef59811a5de --- /dev/null +++ b/Packs/AzureSentinel/ReleaseNotes/1_5_43.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Microsoft Sentinel + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureSentinel/pack_metadata.json b/Packs/AzureSentinel/pack_metadata.json index faf31fcb5e85..79e2017052a1 100644 --- a/Packs/AzureSentinel/pack_metadata.json +++ b/Packs/AzureSentinel/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Microsoft Sentinel", "description": "Microsoft Sentinel is a cloud-native security information and event manager (SIEM) platform that uses built-in AI to help analyze large volumes of data across an enterprise.", "support": "xsoar", - "currentVersion": "1.5.39", + "currentVersion": "1.5.43", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureStorage/ReleaseNotes/1_2_24.md b/Packs/AzureStorage/ReleaseNotes/1_2_24.md new file mode 100644 index 000000000000..7eebdb1cc774 --- /dev/null +++ b/Packs/AzureStorage/ReleaseNotes/1_2_24.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Storage Management + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. 
diff --git a/Packs/AzureStorage/pack_metadata.json b/Packs/AzureStorage/pack_metadata.json index 568292484f0c..01637f02aec0 100644 --- a/Packs/AzureStorage/pack_metadata.json +++ b/Packs/AzureStorage/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure Storage Management", "description": "Deploy and manage storage accounts and blob service properties.", "support": "xsoar", - "currentVersion": "1.2.23", + "currentVersion": "1.2.24", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/AzureWAF/ReleaseNotes/1_1_22.md b/Packs/AzureWAF/ReleaseNotes/1_1_22.md new file mode 100644 index 000000000000..658140577b95 --- /dev/null +++ b/Packs/AzureWAF/ReleaseNotes/1_1_22.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Azure Web Application Firewall + +- Fixed an issue in **MicrosoftApiModule** where the GCC endpoints were incorrect. diff --git a/Packs/AzureWAF/pack_metadata.json b/Packs/AzureWAF/pack_metadata.json index f84743536c74..f60fc73e222c 100644 --- a/Packs/AzureWAF/pack_metadata.json +++ b/Packs/AzureWAF/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Azure WAF", "description": "Azure Web Application Firewall is used to detect web related attacks targeting your web servers hosted in azure and allow quick respond to threats", "support": "xsoar", - "currentVersion": "1.1.21", + "currentVersion": "1.1.22", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Base/ReleaseNotes/1_33_53.md b/Packs/Base/ReleaseNotes/1_33_53.md new file mode 100644 index 000000000000..9fb93e60e7f6 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_33_53.md @@ -0,0 +1,3 @@ +## Base + +- Locked dependencies of the pack to ensure stability for versioned core packs. No changes in this release. 
diff --git a/Packs/Base/ReleaseNotes/1_33_54.md b/Packs/Base/ReleaseNotes/1_33_54.md new file mode 100644 index 000000000000..bd30da226ae1 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_33_54.md @@ -0,0 +1,6 @@ + +#### Scripts + +##### CommonServerPython + +Added the *is_time_sensitive* method, indicating whether the current code is a reputation command called with auto-extract=inline. diff --git a/Packs/Base/ReleaseNotes/1_34_0.md b/Packs/Base/ReleaseNotes/1_34_0.md new file mode 100644 index 000000000000..30e1b041ed66 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_0.md @@ -0,0 +1,6 @@ + +#### Scripts + +##### CommonServerPython + +- Added a wrapper for the *Demisto* class. diff --git a/Packs/Base/ReleaseNotes/1_34_1.md b/Packs/Base/ReleaseNotes/1_34_1.md new file mode 100644 index 000000000000..97f71382d6ac --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_1.md @@ -0,0 +1,6 @@ + +#### Scripts + +##### DBotTrainTextClassifierV2 +- Updated the Docker image to: *demisto/ml:1.0.0.93129*. +- Improved handling of the script arguments. diff --git a/Packs/Base/ReleaseNotes/1_34_2.md b/Packs/Base/ReleaseNotes/1_34_2.md new file mode 100644 index 000000000000..379e94d72f83 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_2.md @@ -0,0 +1,6 @@ + +#### Scripts + +##### SanePdfReports + +- Updated the Docker image to: *demisto/sane-pdf-reports:1.0.0.93953*. diff --git a/Packs/Base/ReleaseNotes/1_34_3.md b/Packs/Base/ReleaseNotes/1_34_3.md new file mode 100644 index 000000000000..4dbb74e03f06 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_3.md @@ -0,0 +1,6 @@ + +#### Scripts + +##### DBotTrainTextClassifierV2 + +- Changed the script's behavior to return an informative non-error message when no incidents are received in the input file instead of returning an error. 
diff --git a/Packs/Base/ReleaseNotes/1_34_4.md b/Packs/Base/ReleaseNotes/1_34_4.md new file mode 100644 index 000000000000..dc4532677373 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_4.md @@ -0,0 +1,4 @@ +#### Scripts + +##### CommonServerPython +Updated the **Retry** mechanism to include the methods **PATCH** and **DELETE**. \ No newline at end of file diff --git a/Packs/Base/ReleaseNotes/1_34_5.md b/Packs/Base/ReleaseNotes/1_34_5.md new file mode 100644 index 000000000000..f44e2e1bcaf0 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_5.md @@ -0,0 +1,7 @@ + +#### Scripts + +##### DBotFindSimilarIncidents + +- Documentation, error messaging, and logging improvements. +- Updated the Docker image to: *demisto/ml:1.0.0.94241*. diff --git a/Packs/Base/ReleaseNotes/1_34_6.md b/Packs/Base/ReleaseNotes/1_34_6.md new file mode 100644 index 000000000000..53693d741ca2 --- /dev/null +++ b/Packs/Base/ReleaseNotes/1_34_6.md @@ -0,0 +1,6 @@ + +#### Scripts + +##### CommonServer + +Fixed the *tableToMarkdown* function to handle text with bad formatting. 
\ No newline at end of file diff --git a/Packs/Base/Scripts/CommonServer/CommonServer.js b/Packs/Base/Scripts/CommonServer/CommonServer.js index 945d22b78145..10fc6b17e3b4 100644 --- a/Packs/Base/Scripts/CommonServer/CommonServer.js +++ b/Packs/Base/Scripts/CommonServer/CommonServer.js @@ -278,19 +278,12 @@ function tableToMarkdown(name, t, headers, cellDelimiter, headerTransform) { for(var i=0; i 1) { - mdResults += newHeaders.join('|') + '\n'; - } else { - mdResults += newHeaders[0] + '|' + '\n'; - } + mdResults += '| ' + newHeaders.join(' | ') + ' |' + '\n'; var sep = []; headers.forEach(function(h){ sep.push('---'); }); - if (sep.length === 1) { - sep[0] = sep[0]+'|'; - } - mdResults += sep.join('|') + '\n'; + mdResults += '| ' + sep.join(' | ') + ' |' + '\n'; t.forEach(function(entry){ var vals = []; if(typeof(entry) !== 'object' && !(entry instanceof Array)){ @@ -305,10 +298,8 @@ function tableToMarkdown(name, t, headers, cellDelimiter, headerTransform) { vals.push(stringEscapeMD(formatCell(entry[h], cellDelimiter), true, true) || ' '); } }); - if (vals.length === 1) { - vals[0] = vals[0]+'|'; - } - mdResults += vals.join(' | ') + '\n'; + + mdResults += '| ' + vals.join(' | ') + ' |' + '\n'; }); } else{ mdResults += 'No data returned\n'; diff --git a/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py b/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py index 9141d8d60055..c2292aff7940 100644 --- a/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py +++ b/Packs/Base/Scripts/CommonServerPython/CommonServerPython.py @@ -8898,7 +8898,7 @@ def _implement_retry(self, retries=0, method_whitelist = "allowed_methods" if hasattr( Retry.DEFAULT, "allowed_methods") else "method_whitelist" # type: ignore[attr-defined] whitelist_kawargs = { - method_whitelist: frozenset(['GET', 'POST', 'PUT']) + method_whitelist: frozenset(['GET', 'POST', 'PUT', 'PATCH', 'DELETE']) } retry = Retry( total=retries, @@ -11992,7 +11992,21 @@ def 
safe_sleep(duration_seconds): .format(duration_seconds, run_duration)) else: demisto.info('Safe sleep is not supported in this server version, sleeping for the requested time.') - time.sleep(duration_seconds) + time.sleep(duration_seconds) # pylint: disable=E9003 + + +def is_time_sensitive(): + """ + Checks if the command reputation (auto-enrichment) is called as auto-extract=inline. + This function checks if the 'isTimeSensitive' attribute exists in the 'demisto' object and if it's set to True. + + :return: bool + :rtype: ``bool`` + """ + return hasattr(demisto, 'isTimeSensitive') and demisto.isTimeSensitive() + + +from DemistoClassApiModule import * # type:ignore [no-redef] # noqa:E402 ########################################### diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py index c7eded235f00..11191bf70d91 100644 --- a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py +++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.py @@ -11,15 +11,21 @@ import pandas as pd from scipy.spatial.distance import cdist from typing import Any +from types import UnionType from GetIncidentsApiModule import * # noqa: E402 warnings.simplefilter("ignore") warnings.filterwarnings('ignore', category=UserWarning) +FIELD_SKIP_REASON_DOESNT_EXIST = "The '{field}' field does not exist in incident" +FIELD_SKIP_REASON_FALSY_VALUE = "The '{field}' field has a falsy value in current incident: '{val}'" +FIELD_SKIP_REASON_INVALID_TYPE = "Expected type of the '{field}' field is: {valid}, actual type is: {type}" +FIELD_SKIP_REASON_TOO_SHORT = "Value of the '{field}' field in incident: '{val}' has length of {len}" +FIELD_SKIP_REASON_LIST_OF_FALSY_VALS = "Value of '{field}' field in incident: '{val}' is a list with only falsy values" + INCIDENT_ALIAS = 'alert' if is_xsiam() else 'incident' -MESSAGE_NO_FIELDS_USED = "- No field are used to find 
similarity. Possible reasons: 1) No field selected " \ - f" 2) Selected field are empty for this {INCIDENT_ALIAS} 3) Fields are misspelled" +MESSAGE_NO_FIELDS_USED = "- No field are used to find similarity. Reasons:\n{}" MESSAGE_NO_INCIDENT_FETCHED = f"- 0 {INCIDENT_ALIAS}s fetched with these exact match for the given dates." @@ -371,6 +377,7 @@ def fit_transform(self): transformation = self.params[self.transformer_type] transformer = transformation['transformer'](self.field, transformation['params'], transformation['normalize'], self.incident_to_match) + demisto.debug(f"Running fit_transform for field {self.field} with transformer {type(transformer)}") x_vect = transformer.fit_transform(self.incidents_df) incident_vect = transformer.transform(self.incident_to_match) @@ -382,6 +389,7 @@ def get_score(self): """ scoring_function = self.params[self.transformer_type]['scoring_function'] X_vect, incident_vect = self.fit_transform() + demisto.debug(f"Calculating similarity of field {self.field} with function {scoring_function.__name__}") dist = scoring_function(X_vect, incident_vect) self.incidents_df['similarity %s' % self.field] = np.round(dist, 2) return self.incidents_df @@ -407,56 +415,78 @@ def init_prediction(self, p_incident_to_match, p_incidents_df, p_field_for_comma :param p_field_for_json: list of incident fields that for the transformer 'json' :return: """ - self.incident_to_match = p_incident_to_match - self.incidents_df = p_incidents_df + self.incident_to_match: pd.DataFrame = p_incident_to_match + self.incidents_df: pd.DataFrame = p_incidents_df self.field_for_command_line = p_field_for_command_line self.field_for_potential_exact_match = p_field_for_potential_exact_match self.field_for_display_fields_incidents = p_field_for_display_fields_incidents self.field_for_json = p_field_for_json def predict(self): - self.remove_empty_or_short_fields() + should_proceed, all_skip_reasons = self.remove_empty_or_short_fields() + if not should_proceed: + raise 
DemistoException("\n".join(all_skip_reasons) or " * No fields were provided for similarity calculation") self.get_score() self.compute_final_score() return self.prepare_for_display(), self.field_for_command_line + self.field_for_potential_exact_match + \ self.field_for_json - def remove_empty_or_short_fields(self): + def remove_empty_or_short_fields(self) -> tuple[bool, list[str]]: """ Remove field where value is empty or is shorter than 2 characters or unusable or does not exist in the incident. - :return: + :return: whether should proceed with calculation, and a list of reasons for skipped fields """ - remove_list = [] - for field in self.field_for_command_line: - if field not in self.incident_to_match.columns \ - or not self.incident_to_match[field].values[0] \ - or (not isinstance(self.incident_to_match[field].values[0], str) and not isinstance( - self.incident_to_match[field].values[0], list)) \ - or self.incident_to_match[field].values[0] == 'None' \ - or len(self.incident_to_match[field].values[0]) < 2 \ - or self.incident_to_match[field].values[0] == 'N/A': - remove_list.append(field) - self.field_for_command_line = [x for x in self.field_for_command_line if x not in remove_list] - - remove_list = [] - for field in self.field_for_potential_exact_match: - if field not in self.incident_to_match.columns or not self.incident_to_match[field].values[ - 0] or not isinstance(self.incident_to_match[field].values[0], str) or \ - len(self.incident_to_match[field].values[0]) < 2 or \ - self.incident_to_match[field].values[0] == 'None' or self.incident_to_match[field].values[ - 0] == 'N/A': - remove_list.append(field) - self.field_for_potential_exact_match = [x for x in self.field_for_potential_exact_match if x not in remove_list] - - remove_list = [] - for field in self.field_for_json: - if field not in self.incident_to_match.columns or not self.incident_to_match[field].values[ - 0] or self.incident_to_match[field].values[0] == 'None' \ - or 
len(self.incident_to_match[field].values[0]) < 2 \ - or self.incident_to_match[field].values[0] == 'N/A' \ - or all(not x for x in self.incident_to_match[field].values[0]): - remove_list.append(field) - self.field_for_json = [x for x in self.field_for_json if x not in remove_list] + all_skip_reasons = [] + + def find_skip_reason(field: str, valid_types: type | UnionType | None) -> str | None: + skip_reason = None + # returns a reason to drop field if exists, or None if no such + if field not in self.incident_to_match.columns: + skip_reason = FIELD_SKIP_REASON_DOESNT_EXIST.format(field=field) + else: + val = self.incident_to_match[field].values[0] + if not val or val in ["None", "N/A"]: + skip_reason = FIELD_SKIP_REASON_FALSY_VALUE.format(field=field, val=val) + elif valid_types and not isinstance(val, valid_types): + skip_reason = FIELD_SKIP_REASON_INVALID_TYPE.format(field=field, valid=valid_types, type=type(val)) + elif len(val) < 2: + skip_reason = FIELD_SKIP_REASON_TOO_SHORT.format(field=field, val=val, len=len(val)) + elif isinstance(val, list) and all(not x for x in val): + skip_reason = FIELD_SKIP_REASON_LIST_OF_FALSY_VALS.format(field=field, val=val) + + if skip_reason: + demisto.debug(f"Skipping - {skip_reason}") + else: + demisto.debug(f"Including {field=} in similarity calculation (value in incident is: {val})") + return skip_reason + + def filter_fields( + fields_list: list[str], + valid_types: type | UnionType | None = None, + ) -> tuple[list[str], list[str]]: + fields_to_use = [] + skip_reasons = [] + for field in fields_list: + if skip_reason := find_skip_reason(field, valid_types): + skip_reasons.append(f" - {skip_reason}") + else: + fields_to_use.append(field) + return fields_to_use, skip_reasons + + self.field_for_command_line, skip_reasons = filter_fields(self.field_for_command_line, valid_types=str | list) + all_skip_reasons.extend(skip_reasons) + + self.field_for_potential_exact_match, skip_reasons = 
filter_fields(self.field_for_potential_exact_match, valid_types=str) + all_skip_reasons.extend(skip_reasons) + + self.field_for_json, skip_reasons = filter_fields(self.field_for_json) + all_skip_reasons.extend(skip_reasons) + + should_proceed = len( + self.field_for_command_line + self.field_for_potential_exact_match + self.field_for_json + ) != 0 + + return should_proceed, all_skip_reasons def get_score(self): """ @@ -556,8 +586,10 @@ def get_incident_by_id(incident_id: str, populate_fields: list[str], from_date: :return: Get incident acording to incident id """ populate_fields_value = ' , '.join(populate_fields) - message_of_values = build_message_of_values([incident_id, populate_fields_value, from_date, to_date]) - demisto.debug(f'Calling get_incidents_by_query, {message_of_values}') + demisto.debug( + f"Calling get_incidents_by_query for {incident_id=} between {from_date=} and {to_date=}," + f"{populate_fields_value=}" + ) incidents = get_incidents_by_query({ 'query': f"id:({incident_id})", 'populateFields': populate_fields_value, @@ -584,7 +616,7 @@ def get_all_incidents_for_time_window_and_exact_match(exact_match_fields: list[s msg = "" exact_match_fields_list = [] for exact_match_field in exact_match_fields: - if exact_match_field not in incident.keys(): + if exact_match_field not in incident: msg += "%s \n" % MESSAGE_NO_FIELD % exact_match_field else: exact_match_fields_list.append(f'{exact_match_field}: "{incident[exact_match_field]}"') @@ -594,8 +626,11 @@ def get_all_incidents_for_time_window_and_exact_match(exact_match_fields: list[s query += " %s" % query_sup populate_fields_value = ' , '.join(populate_fields) - msg_of_values = build_message_of_values([populate_fields_value, from_date, to_date, limit]) - demisto.debug(f'Calling get_incidents_by_query, {msg_of_values}') + demisto.debug( + f"Calling get_incidents_by_query between {from_date=} and {to_date=}," + f"{limit=}, {populate_fields_value=}" + ) + incidents = get_incidents_by_query({ 'query': 
query, 'populateFields': populate_fields_value, @@ -612,55 +647,25 @@ def get_all_incidents_for_time_window_and_exact_match(exact_match_fields: list[s return incidents, msg -def extract_fields_from_args(arg: list[str]) -> list[str]: - fields_list = [preprocess_incidents_field(x.strip(), PREFIXES_TO_REMOVE) for x in arg if x] +def extract_fields_from_args(arg: str) -> list[str]: + fields_list = [preprocess_incidents_field(x.strip(), PREFIXES_TO_REMOVE) for x in argToList(arg) if x] return list(dict.fromkeys(fields_list)) -def get_args(): # type: ignore - """ - Gets argument of this automation - :return: Argument of this automation - """ - use_all_field = demisto.args().get('useAllFields') - if use_all_field == 'True': - similar_text_field = [] - similar_json_field = ['CustomFields'] - similar_categorical_field = [] - exact_match_fields = ['type'] - else: - similar_text_field = demisto.args().get('similarTextField', '').split(',') - similar_text_field = extract_fields_from_args(similar_text_field) +def get_field_args(args) -> tuple: + use_all_field = argToBoolean(args.get("useAllFields") or "False") + exact_match_fields = [] if use_all_field else extract_fields_from_args(args.get("fieldExactMatch")) + similar_text_field = [] if use_all_field else extract_fields_from_args(args.get("similarTextField")) + similar_categorical_field = [] if use_all_field else extract_fields_from_args(args.get("similarCategoricalField")) + similar_json_field = ["CustomFields"] if use_all_field else extract_fields_from_args(args.get("similarJsonField")) - similar_categorical_field = demisto.args().get('similarCategoricalField', '').split(',') - similar_categorical_field = extract_fields_from_args(similar_categorical_field) - - similar_json_field = demisto.args().get('similarJsonField', '').split(',') - similar_json_field = extract_fields_from_args(similar_json_field) - - exact_match_fields = demisto.args().get('fieldExactMatch', '').split(',') - exact_match_fields = 
extract_fields_from_args(exact_match_fields) - - display_fields = demisto.args().get('fieldsToDisplay', '').split(',') - display_fields = [x.strip() for x in display_fields if x] - display_fields = list(set(['id', 'created', 'name'] + display_fields)) - display_fields = list(dict.fromkeys(display_fields)) - - from_date = demisto.args().get('fromDate') - to_date = demisto.args().get('toDate') - show_similarity = demisto.args().get('showIncidentSimilarityForAllFields') - confidence = float(demisto.args().get('minimunIncidentSimilarity')) - max_incidents = int(demisto.args().get('maxIncidentsToDisplay')) - query = demisto.args().get('query') - aggregate = demisto.args().get('aggreagateIncidentsDifferentDate') - limit = int(demisto.args()['limit']) - show_actual_incident = demisto.args().get('showCurrentIncident') - incident_id = demisto.args().get('incidentId') - include_indicators_similarity = demisto.args().get('includeIndicatorsSimilarity') - - return similar_text_field, similar_json_field, similar_categorical_field, exact_match_fields, display_fields, \ - from_date, to_date, show_similarity, confidence, max_incidents, query, aggregate, limit, \ - show_actual_incident, incident_id, include_indicators_similarity + demisto.debug( + f"{exact_match_fields=}\n" + f"{similar_text_field=}\n" + f"{similar_categorical_field=}\n" + f"{similar_json_field=}" + ) + return exact_match_fields, similar_text_field, similar_categorical_field, similar_json_field def load_current_incident(incident_id: str, populate_fields: list[str], from_date: str, to_date: str): @@ -904,24 +909,23 @@ def prepare_current_incident(incident_df: pd.DataFrame, display_fields: list[str return incident_filter -def build_message_of_values(fields: list[Any]): - """ - Prepare a message to be used in logs - :param fields: List of fields - :return: A text message snippet - """ - return "; ".join([f'{current_field}' for current_field in fields]) - - def main(): - similar_text_field, similar_json_field, 
similar_categorical_field, exact_match_fields, display_fields, from_date, \ - to_date, show_distance, confidence, max_incidents, query, aggregate, limit, show_actual_incident, \ - incident_id, include_indicators_similarity = get_args() - fields_values = build_message_of_values([similar_text_field, similar_json_field, similar_categorical_field, - exact_match_fields, display_fields, from_date, to_date, confidence, - max_incidents, aggregate, limit, incident_id, - ]) - demisto.debug(f"Starting,\n{fields_values=}") + args = demisto.args() + exact_match_fields, similar_text_field, similar_categorical_field, similar_json_field = get_field_args(args) + + display_fields = list(set(['id', 'created', 'name'] + argToList(args.get("fieldsToDisplay")))) + + from_date = args.get('fromDate') + to_date = args.get('toDate') + show_distance = args.get('showIncidentSimilarityForAllFields') + confidence = float(args.get('minimunIncidentSimilarity')) + max_incidents = int(args.get('maxIncidentsToDisplay')) + query = args.get('query') + aggregate = args.get('aggreagateIncidentsDifferentDate') + limit = int(args['limit']) + show_actual_incident = args.get('showCurrentIncident') + incident_id = args.get('incidentId') + include_indicators_similarity = args.get('includeIndicatorsSimilarity') global_msg = "" @@ -934,8 +938,6 @@ def main(): return_outputs_error(error_msg="%s \n" % MESSAGE_NO_CURRENT_INCIDENT % incident_id) return None, global_msg - demisto.debug(f'{exact_match_fields=}, {populate_high_level_fields=}') - # load the related incidents populate_fields.remove('id') incidents, msg = get_all_incidents_for_time_window_and_exact_match(exact_match_fields, populate_high_level_fields, @@ -973,17 +975,17 @@ def main(): model = Model(p_transformation=TRANSFORMATION) model.init_prediction(incident_df, incidents_df, similar_text_field, similar_categorical_field, display_fields, similar_json_field) - similar_incidents, fields_used = model.predict() - - if len(fields_used) == 0: - global_msg 
+= "%s \n" % MESSAGE_NO_FIELDS_USED - return_outputs_summary(confidence, number_incident_fetched, 0, fields_used, global_msg) + try: + similar_incidents, fields_used = model.predict() + except DemistoException as e: + global_msg += "%s \n" % MESSAGE_NO_FIELDS_USED.format(str(e)) + return_outputs_summary(confidence, number_incident_fetched, 0, [], global_msg) return_outputs_similar_incidents_empty() return None, global_msg # Get similarity based on indicators if include_indicators_similarity == "True": - args_defined_by_user = {key: demisto.args().get(key) for key in KEYS_ARGS_INDICATORS} + args_defined_by_user = {key: args.get(key) for key in KEYS_ARGS_INDICATORS} full_args_indicators_script = {**CONST_PARAMETERS_INDICATORS_SCRIPT, **args_defined_by_user} similar_incidents = enriched_with_indicators_similarity(full_args_indicators_script, similar_incidents) diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml index 2a114966535d..60fef5c54f27 100644 --- a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml +++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents.yml @@ -14,7 +14,7 @@ args: description: Comma-separated list of additional incident fields to display, but which will not be taken into account when computing similarity. name: fieldsToDisplay - auto: PREDEFINED - description: Comma-separated list of incident fields that have to be equal to the current incident fields. This helps reduce the query size. + description: Comma-separated list of incident fields that have to be equal to the current incident fields. This helps reduce the query size. These fields are not part of the similarity calculation. name: fieldExactMatch - auto: PREDEFINED defaultValue: 'False' @@ -74,7 +74,9 @@ args: name: indicatorsTypes - description: Help to filter out indicators that appear in many incidents. 
Relevant if includeIndicatorsSimilarity is "True". name: maxIncidentsInIndicatorsForWhiteList -comment: Find past similar incidents based on incident fields' similarity. Includes an option to also display indicators similarity. +comment: |- + Finds past similar incidents based on incident fields' similarity. Includes an option to also display indicators similarity. + Note: For the similarity calculation, at least one field must be provided in one of the "similarTextField", "similarCategoricalField", or "similarJsonField" arguments. commonfields: id: DBotFindSimilarIncidents version: -1 @@ -84,7 +86,7 @@ script: '-' subtype: python3 timeout: '0' type: python -dockerimage: demisto/ml:1.0.0.88591 +dockerimage: demisto/ml:1.0.0.94241 runas: DBotWeakRole runonce: true tests: diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py index 28cbde4f277b..1078962931cf 100644 --- a/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py +++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/DBotFindSimilarIncidents_test.py @@ -1,3 +1,4 @@ +from CommonServerPython import DemistoException import demistomock as demisto import numpy as np import pandas as pd @@ -337,21 +338,6 @@ def test_get_get_data_from_indicators_automation(): assert res is None -def test_build_message_of_values(): - from DBotFindSimilarIncidents import build_message_of_values - - assert build_message_of_values([]) == '' - - foo = 'foo_value' - assert build_message_of_values([foo]) == 'foo_value' - - bar = 'bar_value' - assert build_message_of_values([foo, bar]) == 'foo_value; bar_value' - - baz = ['baz1', 'baz2'] - assert build_message_of_values([foo, bar, baz]) == "foo_value; bar_value; ['baz1', 'baz2']" - - @pytest.fixture def sample_data(): # Create sample data for testing @@ -364,12 +350,13 @@ def sample_data(): return pd.DataFrame(data) -fields_to_match = ['created', 'Name', 
'test', 'Id', 'test2', 'xdralerts'] +fields_to_match = ['created', 'Name', 'test', 'Id', 'test2', 'xdralerts', 'hello'] expected_results = ['created'] def test_remove_empty_or_short_fields(sample_data): - from DBotFindSimilarIncidents import Model + from DBotFindSimilarIncidents import Model, FIELD_SKIP_REASON_DOESNT_EXIST, \ + FIELD_SKIP_REASON_FALSY_VALUE, FIELD_SKIP_REASON_TOO_SHORT """ Given: - sample_data: a dataframe with a column of strings @@ -383,10 +370,39 @@ def test_remove_empty_or_short_fields(sample_data): my_instance.incident_to_match = sample_data my_instance.field_for_command_line = fields_to_match - my_instance.field_for_potential_exact_match = fields_to_match - my_instance.field_for_json = fields_to_match + my_instance.field_for_potential_exact_match = [] + my_instance.field_for_json = [] - my_instance.remove_empty_or_short_fields() + should_proceed, all_skip_reasons = my_instance.remove_empty_or_short_fields() assert my_instance.field_for_command_line == expected_results - assert my_instance.field_for_potential_exact_match == expected_results - assert my_instance.field_for_json == expected_results + assert should_proceed + assert all("created" not in reason for reason in all_skip_reasons) + assert f' - {FIELD_SKIP_REASON_TOO_SHORT.format(field="Name", val="t", len=1)}' in all_skip_reasons + assert f' - {FIELD_SKIP_REASON_TOO_SHORT.format(field="Id", val=["123"], len=1)}' in all_skip_reasons + assert f' - {FIELD_SKIP_REASON_FALSY_VALUE.format(field="test", val=None)}' in all_skip_reasons + assert f' - {FIELD_SKIP_REASON_FALSY_VALUE.format(field="test2", val="")}' in all_skip_reasons + assert f' - {FIELD_SKIP_REASON_FALSY_VALUE.format(field="xdralerts", val="N/A")}' in all_skip_reasons + assert f' - {FIELD_SKIP_REASON_DOESNT_EXIST.format(field="hello")}' in all_skip_reasons + + +def test_predict_without_similarity_fields(sample_data): + """ + Given: + - A Model object + When: + - No similarity fields were provided + - Calling Model.predict() + 
Then: + - Ensure the correct exception is raised + """ + from DBotFindSimilarIncidents import Model + model = Model({}) + model.incident_to_match = sample_data + model.field_for_command_line = [] + model.field_for_potential_exact_match = [] + model.field_for_json = [] + + with pytest.raises(DemistoException) as e: + model.predict() + + assert "No fields were provided for similarity calculation" in str(e) diff --git a/Packs/Base/Scripts/DBotFindSimilarIncidents/README.md b/Packs/Base/Scripts/DBotFindSimilarIncidents/README.md index 0db0c041e307..b60a58654123 100644 --- a/Packs/Base/Scripts/DBotFindSimilarIncidents/README.md +++ b/Packs/Base/Scripts/DBotFindSimilarIncidents/README.md @@ -1,4 +1,5 @@ Find past similar incidents based on incident fields' similarity. Includes an option to also display indicators similarity. +Note: For the similarity calculation, at least one field must be provided in one of the "similarTextField", "similarCategoricalField", or "similarJsonField" arguments. ## Script Data @@ -29,7 +30,7 @@ This script is used in the following playbooks and scripts. | similarCategoricalField | Comma-separated list of incident categorical fields to take into account whe computing similarity. For example: IP, URL. Note: In order to calculate similarity, fields must consist of a minimum of 2 letters. | | similarJsonField | Comma-separated list of incident JSON fields to take into account whe computing similarity. For example: CustomFields. Note: In order to calculate similarity, fields must consist of a minimum of 2 letters. | | fieldsToDisplay | Comma-separated list of additional incident fields to display, but which will not be taken into account when computing similarity. | -| fieldExactMatch | Comma-separated list of incident fields that have to be equal to the current incident fields. This helps reduce the query size. | +| fieldExactMatch | Comma-separated list of incident fields that have to be equal to the current incident fields. 
This helps reduce the query size. These fields are not part of the similarity calculation. | | useAllFields | Whether to use a predefined set of fields and custom fields to compute similarity. If "True", it will ignore values in similarTextField, similarCategoricalField, similarJsonField. | | fromDate | The start date by which to filter incidents. Date format will be the same as in the incidents query page, for example, "3 days ago", ""2019-01-01T00:00:00 \+0200"\). | | toDate | The end date by which to filter incidents. Date format will be the same as in the incidents query page, for example, "3 days ago", ""2019-01-01T00:00:00 \+0200"\). | diff --git a/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.py b/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.py index 97df7c2bb226..a557dd0921f5 100644 --- a/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.py +++ b/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.py @@ -46,8 +46,7 @@ def get_phishing_map_labels(comma_values): labels_dict[v] = v if len(set(labels_dict.values())) == 1: mapped_value = list(labels_dict.values())[0] - error = ['Label mapping error: you need to map to at least two labels: {}.'.format(mapped_value)] - return_error('\n'.join(error)) + return_error(f'Label mapping error: you need to map to at least two labels: {mapped_value}.') return {k.encode('utf-8', 'ignore').decode("utf-8"): v for k, v in labels_dict.items()} @@ -81,20 +80,19 @@ def read_file(input_data, input_type): def get_file_entry_id(file_name): file_name = file_name.strip() - res = demisto.dt(demisto.context(), "File(val.Name == '%s')" % file_name) + res = demisto.dt(demisto.context(), f"File(val.Name == '{file_name}')") if not res or len(res) == 0: - return_error("Cannot find file entry id in context by filename: " + file_name) - if type(res) is list: + return_error(f"Cannot find file entry id in context by filename: {file_name}") + if 
isinstance(res, list): res = res[0] return res['EntryID'] def read_files_by_name(file_names, input_type): - file_names = file_names.split(",") - file_names = [f for f in file_names if f] + names = filter(None, argToList(file_names)) # type: ignore[var-annotated] data = [] - for file_name in file_names: - data += read_file(get_file_entry_id(file_name), input_type) + for name in names: + data += read_file(get_file_entry_id(name), input_type) return data @@ -372,18 +370,18 @@ def validate_confusion_matrix(confusion_matrix): def main(): - input = demisto.args()['input'] - input_type = demisto.args()['inputType'] - model_name = demisto.args()['modelName'] - store_model = demisto.args()['storeModel'] == 'true' + input = demisto.args().get('input') + input_type = demisto.args().get('inputType', 'pickle_filename') + model_name = demisto.args().get('modelName', 'phishing_model') + store_model = demisto.args().get('storeModel') == 'true' model_override = demisto.args().get('overrideExistingModel', 'false') == 'true' - target_accuracy = float(demisto.args()['targetAccuracy']) - text_field = demisto.args()['textField'] - tag_fields = demisto.args()['tagField'].split(",") - labels_mapping = get_phishing_map_labels(demisto.args()['phishingLabels']) - keyword_min_score = float(demisto.args()['keywordMinScore']) + target_accuracy = float(demisto.args().get('targetAccuracy', '0.8')) + text_field = demisto.args().get('textField', 'dbot_processed_text') + tag_fields = demisto.args().get('tagField').split(",") + labels_mapping = get_phishing_map_labels(demisto.args().get('phishingLabels')) + keyword_min_score = float(demisto.args().get('keywordMinScore', '0.05')) return_predictions_on_test_set = demisto.args().get('returnPredictionsOnTestSet', 'false') == 'true' - original_text_fields = demisto.args().get('originalTextFields', '') + original_text_fields = demisto.args().get('originalTextFields', 'emailsubject|name,emailbody|emailbodyhtml') algorithm = 
demisto.args().get('trainingAlgorithm', AUTO_TRAINING_ALGO) # FASTTEXT_TRAINING_ALGO and FROM_SCRATCH_TRAINING_ALGO are equivalent, replacement is done because ml_lib # expects algorithm as one of (FASTTEXT_TRAINING_ALGO, FINETUNE_TRAINING_ALGO) @@ -394,64 +392,58 @@ def main(): else: data = read_file(input, input_type) - if len(data) == 0: - err = ['No incidents were received.'] - err += ['Make sure that all arguments are set correctly and that incidents exist in the environment.'] - return_error(' '.join(err)) if len(data) < MIN_INCIDENTS_THRESHOLD: - err = ['Only {} incident(s) were received.'.format(len(data))] - err += ['Minimum number of incidents per label required for training is {}.'.format(MIN_INCIDENTS_THRESHOLD)] - err += ['Make sure that all arguments are set correctly and that enough incidents exist in the environment.'] - return_error('\n'.join(err)) - - data = set_tag_field(data, tag_fields) - data, exist_labels_counter, missing_labels_counter = get_data_with_mapped_label(data, labels_mapping, - DBOT_TAG_FIELD) - validate_data_and_labels(data, exist_labels_counter, labels_mapping, missing_labels_counter) - # print important words for each category - find_keywords_bool = 'findKeywords' in demisto.args() and demisto.args()['findKeywords'] == 'true' - if find_keywords_bool: - try: - find_keywords(data, DBOT_TAG_FIELD, text_field, keyword_min_score) - except Exception: - pass - X, y = get_X_and_y_from_data(data, text_field) - algorithm = validate_labels_and_decide_algorithm(y, algorithm) - test_index, train_index = get_train_and_test_sets_indices(X, y) - X_train, X_test = [X[i] for i in train_index], [X[i] for i in test_index] - y_train, y_test = [y[i] for i in train_index], [y[i] for i in test_index] - phishing_model = demisto_ml.train_model_handler(X_train, y_train, algorithm=algorithm, compress=False) - ft_test_predictions = phishing_model.predict(X_test) - y_pred = [{y_tuple[0]: float(y_tuple[1])} for y_tuple in ft_test_predictions] - if 
return_predictions_on_test_set: - return_file_result_with_predictions_on_test_set(data, original_text_fields, test_index, text_field, y_test, - y_pred) - if 'maxBelowThreshold' in demisto.args(): - target_recall = 1 - float(demisto.args()['maxBelowThreshold']) + return_results( + f'{len(data)} incident(s) received.' + '\nMinimum number of incidents per label required for training: {MIN_INCIDENTS_THRESHOLD}.' + '\nMake sure that all arguments are set correctly and that enough incidents exist in the environment.' + ) else: - target_recall = 0 - threshold_metrics_entry = get_ml_model_evaluation(y_test, y_pred, target_accuracy, target_recall, detailed=True) - # show results for the threshold found - last result so it will appear first - confusion_matrix, metrics_json = output_model_evaluation(model_name=model_name, y_test=y_test, y_pred=y_pred, - res=threshold_metrics_entry, - context_field='DBotPhishingClassifier') - actual_min_accuracy = min(v for k, v in metrics_json['Precision'].items() if k != 'All') - if store_model: - del phishing_model - gc.collect() - if not validate_confusion_matrix(confusion_matrix): - return_error("The trained model didn't manage to predict some of the classes. This model won't be stored." 
- "Please try to retrain the model using a different configuration.") - y_test_pred = [y_tuple[0] for y_tuple in ft_test_predictions] - y_test_pred_prob = [y_tuple[1] for y_tuple in ft_test_predictions] - threshold = float(threshold_metrics_entry['Contents']['threshold']) - store_model_in_demisto(model_name=model_name, model_override=model_override, X=X, y=y, - confusion_matrix=confusion_matrix, threshold=threshold, - y_test_true=y_test, y_test_pred=y_test_pred, y_test_pred_prob=y_test_pred_prob, - target_accuracy=actual_min_accuracy, algorithm=algorithm) - demisto.results("Done training on {} samples model stored successfully".format(len(y))) - else: - demisto.results('Skip storing model') + data = set_tag_field(data, tag_fields) + data, exist_labels_counter, missing_labels_counter = get_data_with_mapped_label(data, labels_mapping, + DBOT_TAG_FIELD) + validate_data_and_labels(data, exist_labels_counter, labels_mapping, missing_labels_counter) + # print important words for each category + find_keywords_bool = 'findKeywords' in demisto.args() and demisto.args()['findKeywords'] == 'true' + if find_keywords_bool: + try: + find_keywords(data, DBOT_TAG_FIELD, text_field, keyword_min_score) + except Exception: + pass + X, y = get_X_and_y_from_data(data, text_field) + algorithm = validate_labels_and_decide_algorithm(y, algorithm) + test_index, train_index = get_train_and_test_sets_indices(X, y) + X_train, X_test = [X[i] for i in train_index], [X[i] for i in test_index] + y_train, y_test = [y[i] for i in train_index], [y[i] for i in test_index] + phishing_model = demisto_ml.train_model_handler(X_train, y_train, algorithm=algorithm, compress=False) + ft_test_predictions = phishing_model.predict(X_test) + y_pred = [{y_tuple[0]: float(y_tuple[1])} for y_tuple in ft_test_predictions] + if return_predictions_on_test_set: + return_file_result_with_predictions_on_test_set(data, original_text_fields, test_index, text_field, y_test, + y_pred) + target_recall = 1 - 
float(demisto.args().get('maxBelowThreshold', 1)) + threshold_metrics_entry = get_ml_model_evaluation(y_test, y_pred, target_accuracy, target_recall, detailed=True) + # show results for the threshold found - last result so it will appear first + confusion_matrix, metrics_json = output_model_evaluation(model_name=model_name, y_test=y_test, y_pred=y_pred, + res=threshold_metrics_entry, + context_field='DBotPhishingClassifier') + actual_min_accuracy = min(v for k, v in metrics_json['Precision'].items() if k != 'All') + if store_model: + del phishing_model + gc.collect() + if not validate_confusion_matrix(confusion_matrix): + return_error("The trained model didn't manage to predict some of the classes. This model won't be stored. " + "Please try to retrain the model using a different configuration.") + y_test_pred = [y_tuple[0] for y_tuple in ft_test_predictions] + y_test_pred_prob = [y_tuple[1] for y_tuple in ft_test_predictions] + threshold = float(threshold_metrics_entry['Contents']['threshold']) + store_model_in_demisto(model_name=model_name, model_override=model_override, X=X, y=y, + confusion_matrix=confusion_matrix, threshold=threshold, + y_test_true=y_test, y_test_pred=y_test_pred, y_test_pred_prob=y_test_pred_prob, + target_accuracy=actual_min_accuracy, algorithm=algorithm) + demisto.results(f"Done training on {len(y)} samples model stored successfully") + else: + demisto.results('Skip storing model') if __name__ in ['builtins', '__main__']: diff --git a/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.yml b/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.yml index de43041cf189..f57578960181 100644 --- a/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.yml +++ b/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2.yml @@ -30,7 +30,7 @@ args: description: The field name with the text to train.
name: textField - defaultValue: '*' - description: 'A comma-separated list of email tags values and mapping. The script considers only the tags specified in this field. You can map label to another value by using this format: LABEL:MAPPED_LABEL. For example, for 4 values in email tag: malicious, credentials harvesting, inner communitcation, external legit email, unclassified. While training, we want to ignore "unclassified" tag, and refer to "credentials harvesting" as "malicious" too. Also, we want to merge "inner communitcation" and "external legit email" to one tag called "non-malicious". The input will be: malicious, credentials harvesting:malicious, inner communitcation:non-malicious, external legit email:non-malicious' + description: 'A comma-separated list of email tags values and mapping. The script considers only the tags specified in this field. You can map label to another value by using this format: LABEL:MAPPED_LABEL. For example, for 4 values in email tag: malicious, credentials harvesting, inner communitcation, external legit email, unclassified. While training, we want to ignore "unclassified" tag, and refer to "credentials harvesting" as "malicious" too. Also, we want to merge "inner communitcation" and "external legit email" to one tag called "non-malicious". The input will be: malicious, credentials harvesting:malicious, inner communitcation:non-malicious, external legit email:non-malicious.' name: phishingLabels - defaultValue: '0.8' description: The ratio of the training set to the entire data set, which is used for model evaluation. 
@@ -121,7 +121,7 @@ tags: - ml timeout: 12µs type: python -dockerimage: demisto/ml:1.0.0.62124 +dockerimage: demisto/ml:1.0.0.93129 runonce: true tests: - Create Phishing Classifier V2 ML Test diff --git a/Packs/Base/Scripts/DBotTrainTextClassifierV2/dbot_train_text_classifier_test.py b/Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2_test.py similarity index 100% rename from Packs/Base/Scripts/DBotTrainTextClassifierV2/dbot_train_text_classifier_test.py rename to Packs/Base/Scripts/DBotTrainTextClassifierV2/DBotTrainTextClassifierV2_test.py diff --git a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml index 97780f9a7ae9..3bb422b6c638 100644 --- a/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml +++ b/Packs/Base/Scripts/SanePdfReport/SanePdfReport.yml @@ -51,7 +51,7 @@ tags: - pdf timeout: '0' type: python -dockerimage: demisto/sane-pdf-reports:1.0.0.91719 +dockerimage: demisto/sane-pdf-reports:1.0.0.93953 runas: DBotWeakRole tests: - No Test diff --git a/Packs/Base/pack_metadata.json b/Packs/Base/pack_metadata.json index 78a3172c4882..e2cde033cf75 100644 --- a/Packs/Base/pack_metadata.json +++ b/Packs/Base/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Base", "description": "The base pack for Cortex XSOAR.", "support": "xsoar", - "currentVersion": "1.33.52", + "currentVersion": "1.34.6", "author": "Cortex XSOAR", "serverMinVersion": "6.0.0", "url": "https://www.paloaltonetworks.com/cortex", diff --git a/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml b/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml index c75086c7fd36..33e83c95226f 100644 --- a/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml +++ b/Packs/Blueliv/Integrations/Blueliv/Blueliv.yml @@ -22,21 +22,26 @@ configuration: name: proxy type: 8 required: false -description: Blueliv reduces risk through actionable, dynamic and targeted threat intelligence, trusted by your organization. -display: Blueliv (Beta) +description: Deprecated. 
No available replacement. +display: Blueliv (Beta) (Deprecated) name: Blueliv_Beta script: commands: - description: Data set collection that gives the latest STIX Indicators about bot ips gathered by Blueliv. name: blueliv-get-botips-feed + deprecated: true - description: Data set collection that gives the latest STIX Indicators about known malicious servers gathered by Blueliv. name: blueliv-get-crimeservers-feed + deprecated: true - description: Data set collection that gives the latest STIX Indicators about malware hashes gathered and analyzed by Blueliv. name: blueliv-get-malware-feed + deprecated: true - description: Data set collection that gives the latest STIX Indicators about attacking IPs gathered and analyzed by Blueliv. name: blueliv-get-attackingips-feed + deprecated: true - description: 'Data related to the number of hacktivism tweets recently created. Blueliv provides two types of feeds: the first one contains the most popular hacktivism hashtags and the second one contains the countries where more number of hacktivism tweets are coming from.' name: blueliv-get-hacktivism-feed + deprecated: true dockerimage: demisto/blueliv:1.0.0.76921 runonce: false script: '' @@ -44,5 +49,6 @@ script: subtype: python2 beta: true tests: -- no test +- No tests (deprecated) fromversion: 5.0.0 +deprecated: true diff --git a/Packs/Blueliv/ReleaseNotes/1_0_4.md b/Packs/Blueliv/ReleaseNotes/1_0_4.md new file mode 100644 index 000000000000..f8441e6d9518 --- /dev/null +++ b/Packs/Blueliv/ReleaseNotes/1_0_4.md @@ -0,0 +1,6 @@ + +#### Integrations + +##### Blueliv (Beta) (Deprecated) + +Deprecated. No available replacement. 
diff --git a/Packs/Blueliv/pack_metadata.json b/Packs/Blueliv/pack_metadata.json index 69cf2e10ef17..48bd7762817a 100644 --- a/Packs/Blueliv/pack_metadata.json +++ b/Packs/Blueliv/pack_metadata.json @@ -1,8 +1,8 @@ { - "name": "Blueliv (Beta)", - "description": "Blueliv reduces risk through actionable, dynamic and targeted threat intelligence, trusted by your organization.", + "name": "Blueliv (Beta) (Deprecated)", + "description": "Deprecated. No available replacement.", "support": "xsoar", - "currentVersion": "1.0.3", + "currentVersion": "1.0.4", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/Box/Integrations/BoxEventsCollector/README.md b/Packs/Box/Integrations/BoxEventsCollector/README.md index eca4b8e20e36..2037823f8794 100644 --- a/Packs/Box/Integrations/BoxEventsCollector/README.md +++ b/Packs/Box/Integrations/BoxEventsCollector/README.md @@ -7,6 +7,10 @@ Collect events from Box's logs. The command is using the [events endpoint](https://developer.box.com/reference/get-events/) with enterprise login. The user making the API call will need to have admin privileges, and the application will need to have the scope manage enterprise properties checked. +<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure Box Event Collector on Cortex XSOAR To acquire the "Credential JSON", you need to get a JWT token and an app from Box. 
diff --git a/Packs/Box/pack_metadata.json b/Packs/Box/pack_metadata.json index a431e6d859a5..40a037ac6cec 100644 --- a/Packs/Box/pack_metadata.json +++ b/Packs/Box/pack_metadata.json @@ -16,5 +16,6 @@ "marketplaces": [ "xsoar", "marketplacev2" - ] + ], + "defaultDataSource": "BoxEventsCollector" } \ No newline at end of file diff --git a/Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/Playbooks/playbook-CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows_README.md b/Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/Playbooks/playbook-CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows_README.md index bd2a5992b063..30b1d369d4e6 100644 --- a/Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/Playbooks/playbook-CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows_README.md +++ b/Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/Playbooks/playbook-CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows_README.md @@ -77,4 +77,4 @@ There are no outputs for this playbook. 
--- -![CVE-2022-3786 & CVE-2022-3602 - OpenSSL X.509 Buffer Overflows](../doc_files/CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows.png) +![CVE-2022-3786 & CVE-2022-3602 - OpenSSL X.509 Buffer Overflows](../doc_files/CVE-2022-3786_and_CVE-2022-3602_-_OpenSSL_X_509_Buffer_Overflows.png) diff --git a/Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/doc_files/CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows.png b/Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/doc_files/CVE-2022-3786_and_CVE-2022-3602_-_OpenSSL_X_509_Buffer_Overflows.png similarity index 100% rename from Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/doc_files/CVE-2022-3786_&_CVE-2022-3602_-_OpenSSL_X.509_Buffer_Overflows.png rename to Packs/CVE_2022_3786_and_CVE_2022_3602_-_OpenSSL_X.509_Buffer_Overflows/doc_files/CVE-2022-3786_and_CVE-2022-3602_-_OpenSSL_X_509_Buffer_Overflows.png diff --git a/Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/Playbooks/playbook-CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell_README.md b/Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/Playbooks/playbook-CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell_README.md index 4d7ed23e3432..d13f2487b426 100644 --- a/Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/Playbooks/playbook-CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell_README.md +++ b/Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/Playbooks/playbook-CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell_README.md @@ -104,4 +104,4 @@ There are no outputs for this playbook. 
## Playbook Image --- -![CVE-2022-41040 & CVE-2022-41082 - ProxyNotShell](../doc_files/CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell.png) \ No newline at end of file +![CVE-2022-41040 & CVE-2022-41082 - ProxyNotShell](../doc_files/CVE-2022-41040_and_CVE-2022-41082_-_ProxyNotShell.png) \ No newline at end of file diff --git a/Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/doc_files/CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell.png b/Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/doc_files/CVE-2022-41040_and_CVE-2022-41082_-_ProxyNotShell.png similarity index 100% rename from Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/doc_files/CVE-2022-41040_&_CVE-2022-41082_-_ProxyNotShell.png rename to Packs/CVE_2022_41040_and_CVE_2022_41082_-_ProxyNotShell/doc_files/CVE-2022-41040_and_CVE-2022-41082_-_ProxyNotShell.png diff --git a/Packs/Campaign/Playbooks/Detect_&_Manage_Phishing_Campaigns_README.md b/Packs/Campaign/Playbooks/Detect_&_Manage_Phishing_Campaigns_README.md index 418e13ab9881..a262a43dc765 100644 --- a/Packs/Campaign/Playbooks/Detect_&_Manage_Phishing_Campaigns_README.md +++ b/Packs/Campaign/Playbooks/Detect_&_Manage_Phishing_Campaigns_README.md @@ -58,4 +58,4 @@ There are no outputs for this playbook. 
## Playbook Image --- -![Detect & Manage Phishing Campaigns](../doc_files/Detect_&_Manage_Phishing_Campaigns.png) \ No newline at end of file +![Detect & Manage Phishing Campaigns](../doc_files/Detect_and_Manage_Phishing_Campaigns.png) \ No newline at end of file diff --git a/Packs/Campaign/doc_files/Detect_&_Manage_Phishing_Campaigns.png b/Packs/Campaign/doc_files/Detect_and_Manage_Phishing_Campaigns.png similarity index 100% rename from Packs/Campaign/doc_files/Detect_&_Manage_Phishing_Campaigns.png rename to Packs/Campaign/doc_files/Detect_and_Manage_Phishing_Campaigns.png diff --git a/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md b/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md index b4c2d74b97d5..2a4c044518a4 100644 --- a/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md +++ b/Packs/CarbonBlackDefense/Integrations/CarbonBlackEndpointStandardEventCollector/README.md @@ -1,5 +1,9 @@ Endpoint Standard (formerly called Carbon Black Defense), a Next-Generation Anti-Virus + EDR. Collect Anti-Virus & EDR alerts and Audit Log Events. +<~XSIAM> +This is the default integration for this content pack when configured by the Data Onboarder. + + ## Configure Carbon Black Endpoint Standard Event Collector on Cortex XSOAR 1. Navigate to **Settings** > **Integrations** > **Servers & Services**. 
diff --git a/Packs/CarbonBlackDefense/pack_metadata.json b/Packs/CarbonBlackDefense/pack_metadata.json index 8c6b0c1b76ea..b7023f6955fa 100644 --- a/Packs/CarbonBlackDefense/pack_metadata.json +++ b/Packs/CarbonBlackDefense/pack_metadata.json @@ -16,5 +16,6 @@ "marketplaces": [ "xsoar", "marketplacev2" - ] + ], + "defaultDataSource": "CarbonBlackEndpointStandardEventCollector" } \ No newline at end of file diff --git a/Packs/Carbon_Black_Enterprise_Live_Response/Playbooks/playbook-Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_-_Live_Response_API_README.md b/Packs/Carbon_Black_Enterprise_Live_Response/Playbooks/playbook-Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_-_Live_Response_API_README.md index 4de0688efc68..99683c78b993 100644 --- a/Packs/Carbon_Black_Enterprise_Live_Response/Playbooks/playbook-Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_-_Live_Response_API_README.md +++ b/Packs/Carbon_Black_Enterprise_Live_Response/Playbooks/playbook-Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_-_Live_Response_API_README.md @@ -47,4 +47,4 @@ Exists ## Playbook Image --- -![Get File Sample From Path - VMware Carbon Black EDR - Live Response API](https://raw.githubusercontent.com/demisto/content/8eb0c6e3e592d9eedbcf72b025c403d44a5ba395/Packs/Carbon_Black_Enterprise_Live_Response/doc_files/Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_(Live_Response_API).png) +![Get File Sample From Path - VMware Carbon Black EDR - Live Response API](https://raw.githubusercontent.com/demisto/content/master/Packs/Carbon_Black_Enterprise_Live_Response/doc_files/Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_Live_Response_API.png) diff --git a/Packs/Carbon_Black_Enterprise_Live_Response/doc_files/Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_(Live_Response_API).png b/Packs/Carbon_Black_Enterprise_Live_Response/doc_files/Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_Live_Response_API.png similarity index 100% rename from 
Packs/Carbon_Black_Enterprise_Live_Response/doc_files/Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_(Live_Response_API).png rename to Packs/Carbon_Black_Enterprise_Live_Response/doc_files/Get_File_Sample_From_Path_-_VMware_Carbon_Black_EDR_Live_Response_API.png diff --git a/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Block_Endpoint_-_Carbon_Black_Response_V2.1_README.md b/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Block_Endpoint_-_Carbon_Black_Response_V2.1_README.md index 2e7a515a2f1f..bf9f95609112 100644 --- a/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Block_Endpoint_-_Carbon_Black_Response_V2.1_README.md +++ b/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Block_Endpoint_-_Carbon_Black_Response_V2.1_README.md @@ -92,4 +92,4 @@ CarbonBlackResponseV2 --- -![Block Endpoint - Carbon Black Response V2.1](../doc_files/Block_Endpoint_-_Carbon_Black_Response_V2.1.png) +![Block Endpoint - Carbon Black Response V2.1](../doc_files/Block_Endpoint_-_Carbon_Black_Response_V2_1.png) \ No newline at end of file diff --git a/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Get_binary_file_from_Carbon_Black_by_its_MD5_hash_README.md b/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Get_binary_file_from_Carbon_Black_by_its_MD5_hash_README.md index 43a4ef674ac5..6c7ff1d0d989 100644 --- a/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Get_binary_file_from_Carbon_Black_by_its_MD5_hash_README.md +++ b/Packs/Carbon_Black_Enterprise_Response/Playbooks/playbook-Get_binary_file_from_Carbon_Black_by_its_MD5_hash_README.md @@ -41,4 +41,4 @@ cb-binary-download ## Playbook Image --- -![Get binary file from Carbon Black by its MD5 hash](https://raw.githubusercontent.com/demisto/content/8eb0c6e3e592d9eedbcf72b025c403d44a5ba395/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_binary_file_from_Carbon_Black_by_MD5_hash.png) +![Get binary file from Carbon Black by its MD5 
hash](https://raw.githubusercontent.com/demisto/content/master/Packs/Carbon_Black_Enterprise_Response/doc_files/Get_binary_file_from_Carbon_Black_by_MD5_hash.png) diff --git a/Packs/Carbon_Black_Enterprise_Response/doc_files/Block_Endpoint_-_Carbon_Black_Response_V2.1.png b/Packs/Carbon_Black_Enterprise_Response/doc_files/Block_Endpoint_-_Carbon_Black_Response_V2_1.png similarity index 100% rename from Packs/Carbon_Black_Enterprise_Response/doc_files/Block_Endpoint_-_Carbon_Black_Response_V2.1.png rename to Packs/Carbon_Black_Enterprise_Response/doc_files/Block_Endpoint_-_Carbon_Black_Response_V2_1.png diff --git a/Packs/Censys/Integrations/CensysV2/CensysV2.py b/Packs/Censys/Integrations/CensysV2/CensysV2.py index 69a7c91e248f..78d758300eea 100644 --- a/Packs/Censys/Integrations/CensysV2/CensysV2.py +++ b/Packs/Censys/Integrations/CensysV2/CensysV2.py @@ -3,7 +3,7 @@ from CommonServerUserPython import * # noqa import urllib3 -from typing import Dict, Any +from typing import Any # Disable insecure warnings urllib3.disable_warnings() # pylint: disable=no-member @@ -13,17 +13,17 @@ class Client(BaseClient): - def censys_view_request(self, index: str, query: str) -> Dict: + def censys_view_request(self, index: str, query: str) -> dict: if index == 'ipv4': - url_suffix = f'v2/hosts/{query}' + url_suffix = f'/api/v2/hosts/{query}' else: - url_suffix = f'v1/view/certificates/{query}' + url_suffix = f'/api/v2/certificates/{query}' res = self._http_request('GET', url_suffix) return res - def censys_search_ip_request(self, query: Dict, page_size: int) -> Dict: - url_suffix = 'v2/hosts/search' + def censys_search_ip_request(self, query: dict, page_size: int) -> dict: + url_suffix = '/api/v2/hosts/search' params = { 'q': query, 'per_page': page_size @@ -31,37 +31,67 @@ def censys_search_ip_request(self, query: Dict, page_size: int) -> Dict: res = self._http_request('GET', url_suffix, params=params) return res - def censys_search_certs_request(self, data: Dict) -> Dict: - 
url_suffix = 'v1/search/certificates' - res = self._http_request('POST', url_suffix, json_data=data) + def censys_search_certs_request(self, data: dict) -> dict: + url_suffix = '/api/v2/certificates/search' + res = self._http_request('GET', url_suffix, json_data=data) + return res + + def ip_reputation_request(self, ip: str, fields: list | None): + url_suffix = f"/api/v2/hosts/search?q=ip={ip}" + if fields: + url_suffix += f"&fields={','.join(fields)}" + + res = self._http_request('GET', url_suffix) + return res + + def domain_reputation_request(self, domain: str): + url_suffix = f"/api/v2/hosts/search?q=dns.names={domain}" + res = self._http_request('GET', url_suffix) return res ''' COMMAND FUNCTIONS ''' -def test_module(client: Client) -> str: - client.censys_view_request('ipv4', '8.8.8.8') - return 'ok' +def test_module(client: Client, params: dict[str, Any]) -> str: + # Check if the user has selected malicious or suspicious labels without premium access + if not params.get('premium_access') and (params.get('malicious_labels') or params.get('suspicious_labels')): + raise DemistoException( + "The 'Determine IP score by label' feature only works for Censys paid subscribers." + "if you have paid access select the 'Determine IP score by label' option " + "to utilize this functionality, or deselect labels") + fields = ['labels'] if params.get('premium_access') else None -def censys_view_command(client: Client, args: Dict[str, Any]) -> CommandResults: + try: + client.ip_reputation_request('8.8.8.8', fields) + return 'ok' + except DemistoException as e: + # Handle permission error for non-premium users attempting to access premium features + if e.res.status_code == 403 and 'specific fields' in e.message: + raise DemistoException( + "Your user does not have permission for premium features. 
" + "Please ensure that you deselect the 'Determine IP score by label' option " + "for non-premium access.") + raise e + + +def censys_view_command(client: Client, args: dict[str, Any]) -> CommandResults: """ Returns host information for the specified IP address or structured certificate data for the specified SHA-256 """ index = args.get('index', '') query = args.get('query', '') res = client.censys_view_request(index, query) + result = res.get('result', {}) if index == 'ipv4': - result = res.get('result', {}) content = { - 'Name': result.get('autonomous_system', {}).get('name'), - 'Bgp Prefix': result.get('autonomous_system', {}).get('bgp_prefix'), + 'Network': result.get('autonomous_system', {}).get('name'), + 'Routing': result.get('autonomous_system', {}).get('bgp_prefix'), 'ASN': result.get('autonomous_system', {}).get('asn'), - 'Service': [{ - 'Port': service.get('port'), - 'Service Name': service.get('service_name') - } for service in result.get('services', [])], + 'Protocols': ', '.join([ + f"{service.get('port')}/{service.get('service_name')}" + for service in result.get('services', [])]), 'Last Updated': result.get('last_updated_at') } @@ -87,7 +117,7 @@ def censys_view_command(client: Client, args: Dict[str, Any]) -> CommandResults: geo_country=country, as_owner=demisto.get(result, 'autonomous_system.name')) - human_readable = tableToMarkdown(f'Information for IP {query}', content) + human_readable = tableToMarkdown(f'Information for IP {query}', content, removeNull=True) return CommandResults( readable_output=human_readable, outputs_prefix='Censys.View', @@ -97,25 +127,27 @@ def censys_view_command(client: Client, args: Dict[str, Any]) -> CommandResults: raw_response=res ) else: - metadata = res.get('metadata', {}) content = { - 'SHA 256': res.get('fingerprint_sha256'), - 'Tags': res.get('tags'), - 'Source': metadata.get('source'), - 'Added': metadata.get('added_at'), - 'Updated': metadata.get('updated_at') + 'Added At': result.get('added_at'), + 
'Modified At': result.get('modified_at'), + 'Browser Trust': [ + f"{name}: {'Valid' if val.get('is_valid') else 'Invalid'}" + for name, val in result.get('validation', {}).items()], + 'SHA 256': result.get('fingerprint_sha256'), + 'Tags': result.get('tags'), + 'Source': result.get('source'), } - human_readable = tableToMarkdown('Information for certificate', content) + human_readable = tableToMarkdown('Information for certificate', content, removeNull=True) return CommandResults( readable_output=human_readable, outputs_prefix='Censys.View', outputs_key_field='fingerprint_sha256', - outputs=res, + outputs=result, raw_response=res ) -def censys_search_command(client: Client, args: Dict[str, Any]) -> CommandResults: +def censys_search_command(client: Client, args: dict[str, Any]) -> CommandResults: """ Returns previews of hosts matching a specified search query or a list of certificates that match the given query. """ @@ -134,15 +166,17 @@ def censys_search_command(client: Client, args: Dict[str, Any]) -> CommandResult for hit in hits: contents.append({ 'IP': hit.get('ip'), - 'Services': hit.get('services'), + 'Services': ', '.join([ + f"{service.get('port')}/{service.get('service_name')}" + for service in hit.get('services', [])]), 'Location Country code': hit.get('location', {}).get('country_code'), - 'Registered Country Code': hit.get('location', {}).get('registered_country_code'), 'ASN': hit.get('autonomous_system', {}).get('asn'), 'Description': hit.get('autonomous_system', {}).get('description'), - 'Name': hit.get('autonomous_system', {}).get('name') + 'Name': hit.get('autonomous_system', {}).get('name'), + 'Registered Country Code': hit.get('location', {}).get('registered_country_code'), }) headers = ['IP', 'Name', 'Description', 'ASN', 'Location Country code', 'Registered Country Code', 'Services'] - human_readable = tableToMarkdown(f'Search results for query "{query}"', contents, headers) + human_readable = tableToMarkdown(f'Search results for query 
"{query}"', contents, headers, removeNull=True) return CommandResults( readable_output=human_readable, outputs_prefix='Censys.Search', @@ -155,7 +189,7 @@ def censys_search_command(client: Client, args: Dict[str, Any]) -> CommandResult return response -def search_certs_command(client: Client, args: Dict[str, Any], query: str, limit: Optional[int]): +def search_certs_command(client: Client, args: dict[str, Any], query: str, limit: Optional[int]): fields = ['parsed.fingerprint_sha256', 'parsed.subject_dn', 'parsed.issuer_dn', 'parsed.issuer.organization', 'parsed.validity.start', 'parsed.validity.end', 'parsed.names'] search_fields = argToList(args.get('fields')) @@ -169,28 +203,207 @@ def search_certs_command(client: Client, args: Dict[str, Any], query: str, limit 'flatten': False } - res = client.censys_search_certs_request(data) - results = res.get('results', {})[:limit] + raw_response = client.censys_search_certs_request(data).get('result', {}).get('hits') + if not raw_response or not isinstance(raw_response, list): + error_msg = f"Unexpected response: 'hits' path not found in response.result. 
Response: {raw_response}" + raise ValueError(error_msg) + results = raw_response[:limit] for result in results: contents.append({ - 'SHA256': result.get('parsed').get('fingerprint_sha256'), - 'Issuer dn': result.get('parsed').get('issuer_dn'), - 'Subject dn': result.get('parsed').get('subject_dn'), - 'Names': result.get('parsed').get('names'), - 'Validity': result.get('parsed').get('validity'), + 'Issuer DN': result.get('parsed', {}).get('issuer_dn'), + 'Subject DN': result.get('parsed', {}).get('subject_dn'), + 'Validity not before': result.get('parsed', {}).get('validity_period', {}).get('not_before'), + 'Validity not after': result.get('parsed', {}).get('validity_period', {}).get('not_after'), + 'SHA256': result.get('fingerprint_sha256'), + 'Names': result.get('names'), 'Issuer': result.get('parsed').get('issuer'), }) - human_readable = tableToMarkdown(f'Search results for query "{query}"', contents) + human_readable = tableToMarkdown(f'Search results for query "{query}"', contents, removeNull=True) return CommandResults( readable_output=human_readable, outputs_prefix='Censys.Search', outputs_key_field='fingerprint_sha256', outputs=results, - raw_response=res + raw_response=raw_response ) +def ip_command(client: Client, args: dict, params: dict): + fields = [ + "labels", "autonomous_system.asn", "autonomous_system.name", + "autonomous_system.bgp_prefix", "autonomous_system.country_code", + "autonomous_system.description", "location.country_code", + "location.timezone", "location.province", "location.postal_code", + "location.coordinates.latitude", "location.coordinates.longitude", + "location.city", "location.continent", "location.country", "services.service_name", + "services.port", "services.transport_protocol", "services.extended_service_name", + "services.certificate", "last_updated_at", "dns.reverse_dns.names", + "operating_system.source", "operating_system.part", "operating_system.version" + ] if params.get('premium_access') else None + + ips: list = 
argToList(args.get('ip')) + results: List[CommandResults] = [] + execution_metrics = ExecutionMetrics() + + for ip in ips: + try: + raw_response = client.ip_reputation_request(ip, fields) + response = raw_response.get('result', {}).get('hits') + if not response or not isinstance(response, list): + error_msg = f"Unexpected response: 'hits' path not found in response.result. Response: {raw_response}" + raise ValueError(error_msg) + + hit = response[0] + dbot_score = Common.DBotScore( + indicator=ip, + indicator_type=DBotScoreType.IP, + integration_name="Censys", + score=get_dbot_score(params, hit.get('labels', [])), + reliability=params.get('integration_reliability') + ) + content = { + 'ip': hit.get('ip'), + 'asn': hit.get("autonomous_system", {}).get('asn'), + 'updated_date': hit.get('last_updated_at'), + 'geo_latitude': hit.get('location', {}).get('coordinates', {}).get('latitude'), + 'geo_longitude': hit.get('location', {}).get('coordinates', {}).get('longitude'), + 'geo_country': hit.get('location', {}).get('country'), + 'port': ', '.join([str(service.get('port')) for service in hit.get('services', [])]), + } + indicator = Common.IP(dbot_score=dbot_score, **content) + content['reputation'] = dbot_score.score + results.append(CommandResults( + outputs_prefix='Censys.IP', + outputs_key_field='IP', + readable_output=tableToMarkdown( + f'censys results for IP: {ip}', + content, headerTransform=string_to_table_header, + removeNull=True), + outputs=hit, + raw_response=raw_response, + indicator=indicator, + )) + + execution_metrics.success += 1 + except Exception as e: + should_break = handle_exceptions(e, results, execution_metrics, ip) + if should_break: + break + + if execution_metrics.metrics: + results.append(execution_metrics.metrics) + + return results + + +def domain_command(client: Client, args: dict): + domains: list = argToList(args.get('domain')) + results: List[CommandResults] = [] + execution_metrics = ExecutionMetrics() + + for domain in domains: + try: 
+ response = client.domain_reputation_request(domain).get('result', {}) + hits = response.get('hits') + if not hits or not isinstance(hits, list): + error_msg = f"Unexpected response: 'hits' path not found in response.result. Response: {response}" + raise ValueError(error_msg) + + relationships = [EntityRelationship( + name=EntityRelationship.Relationships.RELATED_TO, + entity_a=domain, + entity_a_type='Domain', + entity_b=hit.get('ip'), + entity_b_type='IP', + reverse_name=EntityRelationship.Relationships.RELATED_TO, + brand='Censys') for hit in hits] + + dbot_score = Common.DBotScore(indicator=domain, indicator_type=DBotScoreType.DOMAIN, score=Common.DBotScore.NONE) + indicator = Common.Domain(domain=domain, dbot_score=dbot_score, relationships=relationships) + + results.append(CommandResults( + outputs_prefix='Censys.Domain', + outputs_key_field='Domain', + readable_output=tableToMarkdown( + f'Censys results for Domain: {domain}', + {'domain': domain}, headerTransform=string_to_table_header, removeNull=True), + outputs=hits, + raw_response=response, + indicator=indicator, + relationships=relationships + )) + + execution_metrics.success += 1 + except Exception as e: + should_break = handle_exceptions(e, results, execution_metrics, domain) + if should_break: + break + + if execution_metrics.metrics: + results.append(execution_metrics.metrics) + + return results + + +''' HELPER FUNCTIONS ''' + + +def handle_exceptions(e: Exception, results: list[CommandResults], execution_metrics: ExecutionMetrics, item: str): + status_code = 0 + message = str(e) + + if isinstance(e, DemistoException) and hasattr(e.res, 'status_code'): + status_code = e.res.status_code + message = e.message + + if status_code == 403 and 'quota' in message: + # Handle quota exceeded error + execution_metrics.quota_error += 1 + results.append(CommandResults(readable_output=f"Quota exceeded. 
Error: {message}")) + return True + + elif status_code == 429: + # Handle rate limits error + execution_metrics.general_error += 1 + results.append(CommandResults(readable_output=f"Too many requests. Error: {message}")) + return True + + elif status_code == 403 and 'specific fields' in message: + # Handle non-premium access error + raise DemistoException( + "Your user does not have permission for premium features. " + "Please ensure that you deselect the 'Labels premium feature available' option " + f"for non-premium access. Error: {message}") + + elif status_code == 401 or status_code == 403: + # Handle unauthorized access error + raise e + + else: + # Handle general error + execution_metrics.general_error += 1 + error_msg = f"An error occurred for item: {item}. Error: {message}" + results.append(CommandResults(readable_output=error_msg)) + return False + + +def get_dbot_score(params: dict, result_labels: list): + malicious_labels = set(params.get("malicious_labels", [])) + suspicious_labels = set(params.get("suspicious_labels", [])) + malicious_threshold = arg_to_number(params.get("malicious_labels_threshold")) or 0 + suspicious_threshold = arg_to_number(params.get("suspicious_labels_threshold")) or 0 + num_malicious = len(malicious_labels.intersection(result_labels)) + if num_malicious >= malicious_threshold and num_malicious > 0: + return Common.DBotScore.BAD + + num_suspicious = len(suspicious_labels.intersection(result_labels)) + if num_suspicious >= suspicious_threshold and num_suspicious > 0: + return Common.DBotScore.SUSPICIOUS + + return Common.DBotScore.NONE + + ''' MAIN FUNCTION ''' @@ -200,8 +413,8 @@ def main() -> None: password = params.get('credentials', {}).get('password') verify_certificate = not params.get('insecure', False) proxy = params.get('proxy', False) + base_url = params.get("server_url") or 'https://search.censys.io' - base_url = 'https://search.censys.io/api/' command = demisto.command() demisto.debug(f'Command being called is 
{command}') try: @@ -213,13 +426,16 @@ def main() -> None: if command == 'test-module': # This is the call made when pressing the integration Test button. - result = test_module(client) - return_results(result) + return_results(test_module(client, params)) elif command == 'cen-view': return_results(censys_view_command(client, demisto.args())) elif command == 'cen-search': return_results(censys_search_command(client, demisto.args())) + elif command == 'ip': + return_results(ip_command(client, demisto.args(), params)) + elif command == 'domain': + return_results(domain_command(client, demisto.args())) # Log exceptions and return errors except Exception as e: diff --git a/Packs/Censys/Integrations/CensysV2/CensysV2.yml b/Packs/Censys/Integrations/CensysV2/CensysV2.yml index e2818ef6ee76..14e8d84c483a 100644 --- a/Packs/Censys/Integrations/CensysV2/CensysV2.yml +++ b/Packs/Censys/Integrations/CensysV2/CensysV2.yml @@ -3,19 +3,116 @@ commonfields: id: CensysV2 version: -1 configuration: +- display: Server URL + name: server_url + required: true + type: 0 + defaultvalue: https://search.censys.io + section: Connect - display: App ID - displaypassword: Secret name: credentials - required: true type: 9 + required: true + displaypassword: Secret + section: Connect - display: Trust any certificate (not secure) name: insecure type: 8 required: false + section: Connect - display: Use system proxy settings name: proxy type: 8 required: false + section: Connect +- display: Determine IP score by labels (for paid subscribers) + name: premium_access + type: 8 + required: false + section: Connect + additionalinfo: |- + Censys API provides reputation data exclusively to paid subscribers. + When set to True, the integration will use labels to determine the IP score. 
+- display: IP Malicious labels + name: malicious_labels + type: 16 + required: false + options: + - database + - email + - file-sharing + - iot + - login-page + - network-administration + - network.device + - network.device.firewall + - network.device.soho + - network.device.vpn + - network.device.web-ui + - onion-enabled + - out-of-band-management + - remote-access + - scada + - voip + - web.control-panel.hosting + additionalinfo: |- + Used only when `Determine IP score by labels` is set. + Labels to classify IP as Malicious. + Input can be an array or comma-separated values. + section: Collect +- display: IP Suspicious labels + name: suspicious_labels + type: 16 + required: false + options: + - database + - email + - file-sharing + - iot + - login-page + - network-administration + - network.device + - network.device.firewall + - network.device.soho + - network.device.vpn + - network.device.web-ui + - onion-enabled + - out-of-band-management + - remote-access + - scada + - voip + - web.control-panel.hosting + additionalinfo: |- + Used when `Determine IP score by labels` is set. + Labels to classify IP as Suspicious. + Input can be an array or comma-separated values. + section: Collect +- display: Malicious labels threshold + name: malicious_labels_threshold + type: 0 + required: false + additionalinfo: Determines the minimum number of labels returned that are classified as malicious for IP. + section: Collect +- display: Suspicious labels threshold + name: suspicious_labels_threshold + type: 0 + required: false + additionalinfo: Determines the minimum number of labels returned that are classified as suspicious for IP. 
+ section: Collect +- display: Source Reliability + name: integration_reliability + defaultvalue: C - Fairly reliable + type: 15 + options: + - A+ - 3rd party enrichment + - A - Completely reliable + - B - Usually reliable + - C - Fairly reliable + - D - Not usually reliable + - E - Unreliable + - F - Reliability cannot be judged + additionalinfo: Reliability of the source providing the intelligence data. + section: collect description: Censys is a search engine that allows computer scientists to ask questions about the devices and networks that compose the internet. Driven by internet-wide scanning, Censys lets researchers find specific hosts and create aggregate reports on how devices, and certificates are configured and deployed. display: Censys v2 name: CensysV2 @@ -231,13 +328,13 @@ script: description: How the certificate is validated - Domain validated (DV), Organization Validated (OV), Extended Validation (EV), or unknown. type: String - contextPath: Censys.View.parsed.validity.end - description: Timestamp of when the certificate expires. Timezone is UTC. + description: Timestamp of when the certificate expires. Time zone is UTC. type: Date - contextPath: Censys.View.parsed.validity.length description: The length of time, in seconds, that the certificate is valid. type: Number - contextPath: Censys.View.parsed.validity.start - description: Timestamp of when certificate is first valid. Timezone is UTC. + description: Timestamp of when certificate is first valid. Time zone is UTC. type: Date - contextPath: Censys.View.parsed.version description: The x.509 certificate version number. @@ -373,15 +470,220 @@ script: description: Distinguished name of the entity that the certificate belongs to. type: String - contextPath: Censys.Search.parsed.validity.end - description: Timestamp of when the certificate expires. Timezone is UTC. + description: Timestamp of when the certificate expires. Time zone is UTC. 
type: Date - contextPath: Censys.Search.parsed.validity.start - description: Timestamp of when the certificate is first valid. Timezone is UTC. + description: Timestamp of when the certificate is first valid. Time zone is UTC. type: Date - contextPath: Censys.Search.parsed.issuer_dn description: Distinguished name of the entity that has signed and issued the certificate. type: String - dockerimage: demisto/python3:3.10.13.84405 + - name: ip + arguments: + - name: ip + description: IP address to check. + required: true + isArray: true + type: textArea + default: true + description: Runs reputation on IPs. + outputs: + - contextPath: Censys.IP.services.port + description: The port number associated with the service running on the IP. + type: Number + - contextPath: Censys.IP.services.transport_protocol + description: The transport protocol used by the service running on the IP. + type: String + - contextPath: Censys.IP.services.extended_service_name + description: The extended name of the service running on the IP. + type: String + - contextPath: Censys.IP.services.service_name + description: The name of the service running on the IP. + type: String + - contextPath: Censys.IP.services.certificate + description: The SSL/TLS certificate associated with the service running on the IP. + type: String + - contextPath: Censys.IP.labels + description: Labels associated with the IP address. + type: String + - contextPath: Censys.IP.dns.reverse_dns.names + description: Reverse DNS names associated with the IP address. + type: String + - contextPath: Censys.IP.autonomous_system.country_code + description: The country code of the autonomous system associated with the IP address. + type: String + - contextPath: Censys.IP.autonomous_system.description + description: Description of the autonomous system associated with the IP address. + type: String + - contextPath: Censys.IP.autonomous_system.name + description: Name of the autonomous system associated with the IP address. 
+ type: String + - contextPath: Censys.IP.autonomous_system.bgp_prefix + description: BGP prefix of the autonomous system associated with the IP address. + type: String + - contextPath: Censys.IP.autonomous_system.asn + description: Autonomous System Number (ASN) of the autonomous system associated with the IP address. + type: Number + - contextPath: Censys.IP.ip + description: The IP address. + type: String + - contextPath: Censys.IP.location.country + description: Country name of the location associated with the IP address. + type: String + - contextPath: Censys.IP.location.timezone + description: Time zone of the location associated with the IP address. + type: String + - contextPath: Censys.IP.location.province + description: Province name of the location associated with the IP address. + type: String + - contextPath: Censys.IP.location.coordinates.latitude + description: Latitude coordinate of the location associated with the IP address. + type: Number + - contextPath: Censys.IP.location.coordinates.longitude + description: Longitude coordinate of the location associated with the IP address. + type: Number + - contextPath: Censys.IP.location.continent + description: Continent name of the location associated with the IP address. + type: String + - contextPath: Censys.IP.location.postal_code + description: Postal code of the location associated with the IP address. + type: String + - contextPath: Censys.IP.location.city + description: City name of the location associated with the IP address. + type: String + - contextPath: Censys.IP.location.country_code + description: Country code of the location associated with the IP address. + type: String + - contextPath: Censys.IP.last_updated_at + description: The date and time when the information about the IP address was last updated. + type: Date + - contextPath: IP.Address + description: The IP address. + - contextPath: IP.ASN + description: The IP ASN. + - contextPath: IP.Geo.Country + description: The IP country. 
+ - contextPath: IP.Geo.Location + description: The IP location. + - contextPath: IP.UpdatedDate + description: The IP last update. + - contextPath: IP.Port + description: The IP port. + - contextPath: DBotScore.Indicator + description: The indicator that was tested. + - contextPath: DBotScore.Type + description: The indicator type. + - contextPath: DBotScore.Score + description: The actual score. + type: Number + - contextPath: DBotScore.Reliability + description: Reliability of the source providing the intelligence data. + type: String + - contextPath: DBotScore.Vendor + description: The vendor used to calculate the score. + - name: domain + arguments: + - name: domain + description: A comma-separated list of domains to check. + required: true + isArray: true + default: true + type: textArea + description: Return all related IPs as relationships. + outputs: + - contextPath: Censys.Domain.location.postal_code + description: The postal code of the location associated with the domain. + type: String + - contextPath: Censys.Domain.location.province + description: The province name of the location associated with the domain. + type: String + - contextPath: Censys.Domain.location.country_code + description: The country code of the location associated with the domain. + type: String + - contextPath: Censys.Domain.location.timezone + description: The time zone of the location associated with the domain. + type: String + - contextPath: Censys.Domain.location.country + description: The country name of the location associated with the domain. + type: String + - contextPath: Censys.Domain.location.coordinates.longitude + description: The longitude coordinate of the location associated with the domain. + type: Number + - contextPath: Censys.Domain.location.coordinates.latitude + description: The latitude coordinate of the location associated with the domain. 
+ type: Number + - contextPath: Censys.Domain.location.continent + description: The continent name of the location associated with the domain. + type: String + - contextPath: Censys.Domain.location.city + description: The city name of the location associated with the domain. + type: String + - contextPath: Censys.Domain.autonomous_system.country_code + description: The country code of the autonomous system associated with the domain. + type: String + - contextPath: Censys.Domain.autonomous_system.asn + description: The Autonomous System Number (ASN) associated with the domain. + type: Number + - contextPath: Censys.Domain.autonomous_system.name + description: The name of the autonomous system associated with the domain. + type: String + - contextPath: Censys.Domain.autonomous_system.bgp_prefix + description: The BGP prefix of the autonomous system associated with the domain. + type: String + - contextPath: Censys.Domain.autonomous_system.description + description: The description of the autonomous system associated with the domain. + type: String + - contextPath: Censys.Domain.services.transport_protocol + description: The transport protocol used by the service associated with the domain. + type: String + - contextPath: Censys.Domain.services.extended_service_name + description: The extended name of the service associated with the domain. + type: String + - contextPath: Censys.Domain.services.port + description: The port number associated with the service associated with the domain. + type: Number + - contextPath: Censys.Domain.services.service_name + description: The name of the service associated with the domain. + type: String + - contextPath: Censys.Domain.services.certificate + description: The SSL/TLS certificate associated with the service associated with the domain. + type: String + - contextPath: Censys.Domain.last_updated_at + description: The date and time when the information about the domain was last updated. 
+ type: Date + - contextPath: Censys.Domain.ip + description: The IP address associated with the domain. + type: String + - contextPath: Censys.Domain.dns.reverse_dns.names + description: The reverse DNS names associated with the domain. + type: String + - contextPath: Domain.Name + description: The domain. + type: string. + - contextPath: Domain.Relationships.EntityA + description: The domain name. + type: string. + - contextPath: Domain.Relationships.EntityAType + description: The entity type. + type: string. + - contextPath: Domain.Relationships.EntityB + description: The entity B. + type: string. + - contextPath: Domain.Relationships.EntityBType + description: The entity B type. + type: string. + - contextPath: Domain.Relationships.Relationship + description: The relationship type. + type: string. + - contextPath: DBotScore.Indicator + description: The indicator that was tested. + - contextPath: DBotScore.Type + description: The indicator type. + - contextPath: DBotScore.Score + description: The actual score. + - contextPath: DBotScore.Vendor + description: The vendor used to calculate the score. + dockerimage: demisto/python3:3.10.14.92207 runonce: false script: '-' subtype: python3 diff --git a/Packs/Censys/Integrations/CensysV2/CensysV2_description.md b/Packs/Censys/Integrations/CensysV2/CensysV2_description.md index d6dcd195cad0..ba80509b8a58 100644 --- a/Packs/Censys/Integrations/CensysV2/CensysV2_description.md +++ b/Packs/Censys/Integrations/CensysV2/CensysV2_description.md @@ -1,5 +1,14 @@ ## Retrieve Your API ID and Secret + 1. Log in to the Censys console as the user for which you want to get the credentials from. 2. Click the username in the upper-right corner. 3. Under *My Account*, click the API tab. 4. Under *API Credentials* copy your **API ID** and the **Secret** and paste it to the integration configuration. 
+ +## Rate limit + +Censys rate limits to 10 queries a day per IP for unauthenticated clients, and variable numbers per day depending on your pricing tier. + +## IP reputation command + +Censys API provides reputation data exclusively to paying customers. When set to True, the integration will use labels to determinate reputation on IPs. diff --git a/Packs/Censys/Integrations/CensysV2/CensysV2_test.py b/Packs/Censys/Integrations/CensysV2/CensysV2_test.py index 6a0b6f932f2f..07f8981ab7ea 100644 --- a/Packs/Censys/Integrations/CensysV2/CensysV2_test.py +++ b/Packs/Censys/Integrations/CensysV2/CensysV2_test.py @@ -1,41 +1,30 @@ import json -import io from CensysV2 import Client, censys_view_command, censys_search_command import pytest from CommonServerPython import DemistoException -SEARCH_HOST_OUTPUTS = [{ - 'ip': '1.0.0.0', - 'services': [ - {'port': 80, 'service_name': 'HTTP', 'transport_protocol': 'TCP'}, - {'port': 443, 'service_name': 'HTTP', 'transport_protocol': 'TCP'}], - 'location': {'continent': 'Oceania', 'country': 'Australia', 'country_code': 'AU', 'timezone': 'Australia/Sydney', - 'coordinates': {'latitude': -33.494, 'longitude': 143.2104}, 'registered_country': 'Australia', - 'registered_country_code': 'AU'}, - 'autonomous_system': {'asn': 13335, 'description': 'CLOUDFLARENET', 'bgp_prefix': '1.0.0.0/24', - 'name': 'CLOUDFLARENET', 'country_code': 'US'}}] - SEARCH_CERTS_OUTPUTS = [{ - 'parsed': - {'fingerprint_sha256': 'f3ade17dffcadd9532aeb2514f10d66e22941393725aa65366ac286df9b41234', - 'issuer': {'organization': ["Let's Encrypt"]}, - 'issuer_dn': "C=US, O=Let's Encrypt, CN=Let's Encrypt Authority X3", - 'names': ['*.45g4rg43g4fr3434g.gb.net', '45g4rg43g4fr3434g.gb.net'], - 'subject_dn': 'CN=45g4rg43g4fr3434g.gb.net', - 'validity': {'end': '2021-01-10T14:46:11Z', 'start': '2020-10-12T14:46:11Z'}}}] + "names": ["my-house-vtpvbznpmk.dynamic-m.com"], + "parsed": { + "validity_period": { + "not_after": "2024-07-03T13:17:43Z", + "not_before": 
"2024-04-04T13:18:43Z"}, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=my-house-vtpvbznpmk.dynamic-m.com"}, + "fingerprint_sha256": "XXXXXXXXX"}] def util_load_json(path): - with io.open(path, mode='r', encoding='utf-8') as f: + with open(path, encoding='utf-8') as f: return json.loads(f.read()) @pytest.fixture() def client(): - client = Client(base_url='https://search.censys.io/api/', auth=('test', '1234'), verify=True, proxy=False) + client = Client(base_url='https://search.censys.io/', auth=('test', '1234'), verify=True, proxy=False) return client @@ -58,7 +47,7 @@ def test_censys_host_search(requests_mock, client): requests_mock.get('https://search.censys.io/api/v2/hosts/search', json=mock_response) response = censys_search_command(client, args) assert "### Search results for query \"services.service_name:HTTP\"" in response.readable_output - assert response.outputs == SEARCH_HOST_OUTPUTS + assert response.outputs == mock_response.get('result', {}).get('hits', []) def test_censys_certs_search(requests_mock, client): @@ -73,11 +62,12 @@ def test_censys_certs_search(requests_mock, client): args = { 'index': 'certificates', 'query': "parsed.issuer.common_name: \"Let's Encrypt\"", - 'fields': ['parsed.fingerprint_sha1', 'validation.apple.valid'] + 'fields': ['parsed.fingerprint_sha1', 'validation.apple.valid'], + 'limit': 1 } mock_response = util_load_json('test_data/search_certs_response.json') - requests_mock.post('https://search.censys.io/api/v1/search/certificates', json=mock_response) + requests_mock.get('https://search.censys.io/api/v2/certificates/search', json=mock_response) response = censys_search_command(client, args) history = requests_mock.request_history[0] assert json.loads(history.text)['fields'] == ['parsed.fingerprint_sha256', 'parsed.subject_dn', @@ -145,15 +135,121 @@ def test_censys_view_cert(requests_mock, 
client): 'query': "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f661234" } mock_response = util_load_json('test_data/view_cert_response.json') - requests_mock.get('https://search.censys.io/api/v1/view/certificates/9d3b51a6b80daf76e07473' + requests_mock.get('https://search.censys.io/api/v2/certificates/9d3b51a6b80daf76e07473' '0f19dc01e643ca0c3127d8f48be64cf3302f661234', json=mock_response) response = censys_view_command(client, args) assert '### Information for certificate' in response.readable_output - assert response.outputs == mock_response + assert response.outputs == mock_response.get('result') def test_test_module_valid(requests_mock, client): + """ + Given: + - A valid client + When: + - Testing the module + Then: + - Ensure the module test is successful and returns 'ok' + """ + from CensysV2 import test_module + requests_mock.get(url='https://search.censys.io/api/v2/hosts/search?q=ip=8.8.8.8', status_code=200, json="{}") + + assert test_module(client, {}) == 'ok' + + +def test_test_module_invalid(requests_mock, client): + """ + Given: + - An invalid client with specific parameters + When: + - Testing the module + Then: + - Ensure a DemistoException is raised + """ from CensysV2 import test_module - requests_mock.get(url='https://search.censys.io/api/v2/hosts/8.8.8.8', status_code=200, json="{}") + requests_mock.get(url='https://search.censys.io/api/v2/hosts/search?q=ip=8.8.8.8', status_code=200, json="{}") - assert test_module(client) == 'ok' + params = {'premium_access': False, 'malicious_labels': True} + with pytest.raises(DemistoException): + test_module(client, params) + + +def test_ip_command_multiple_ips(requests_mock, client): + """ + Given: + - Multiple IP addresses in the arguments + When: + - Running the ip_command function + Then: + - Validate the responses for each IP, including errors and quota exceeded messages + """ + from CensysV2 import ip_command + mock_response = util_load_json('test_data/ip_command_response.json') + args = 
{'ip': ['8.8.8.8', '8.8.8.8', '0.0.0.0', '8.8.4.4']} + requests_mock.get("/api/v2/hosts/search?q=ip=8.8.8.8", json=mock_response) + requests_mock.get("/api/v2/hosts/search?q=ip=8.8.8.8", json=mock_response) + requests_mock.get("/api/v2/hosts/search?q=ip=0.0.0.0", status_code=404, json={}) + requests_mock.get("/api/v2/hosts/search?q=ip=8.8.4.4", status_code=403, json={'message': 'quota'}) + response = ip_command(client, args, {}) + assert response[0].outputs == mock_response.get('result', {}).get('hits')[0] + assert response[1].outputs == mock_response.get('result', {}).get('hits')[0] + assert 'An error occurred for item: 0.0.0.0' in response[2].readable_output + assert 'Quota exceeded.' in response[3].readable_output + + +def test_ip_command_unauthorized_error(requests_mock, client): + """ + Given: + - An unauthorized request + When: + - Running the ip_command function + Then: + - Ensure a DemistoException is raised + """ + from CensysV2 import ip_command + args = {'ip': ['8.8.8.8']} + requests_mock.get("/api/v2/hosts/search?q=ip=8.8.8.8", status_code=401, json={}) + with pytest.raises(DemistoException): + ip_command(client, args, {}) + + +def test_ip_command_malicious_ip(requests_mock, client): + """ + Given: + - An IP address flagged as malicious + When: + - Running the ip_command function + Then: + - Ensure the correct DBot score is assigned + """ + from CensysV2 import ip_command + mock_response = util_load_json('test_data/ip_command_response.json') + args = {"ip": ['8.8.8.8']} + params = { + 'premium_access': True, + 'malicious_labels': ['database', 'email', 'file-sharing', 'iot', 'login-page'], + 'malicious_labels_threshold': 1} + requests_mock.get("/api/v2/hosts/search?q=ip=8.8.8.8", json=mock_response) + response = ip_command(client, args, params) + assert response[0].indicator.dbot_score.score == 3 + + +def test_domain_command_multiple_domains(requests_mock, client): + """ + Given: + - Multiple domain names in the arguments + When: + - Running the 
domain_command function + Then: + - Validate the responses for each domain, including errors + """ + from CensysV2 import domain_command + mock_response = util_load_json('test_data/domain_command_response.json') + args = {'domain': ['amazon.com', 'amazon.com', 'example.com']} + requests_mock.get("/api/v2/hosts/search?q=dns.names=amazon.com", json=mock_response) + requests_mock.get("/api/v2/hosts/search?q=dns.names=amazon.com", json=mock_response) + requests_mock.get("/api/v2/hosts/search?q=dns.names=example.com", status_code=404, json={}) + response = domain_command(client, args) + assert response[0].outputs == mock_response.get('result', {}).get('hits') + assert response[1].outputs == mock_response.get('result', {}).get('hits') + assert 'An error occurred for item: example.com' in response[2].readable_output diff --git a/Packs/Censys/Integrations/CensysV2/README.md b/Packs/Censys/Integrations/CensysV2/README.md index ba749e6dfb9a..16850323377c 100644 --- a/Packs/Censys/Integrations/CensysV2/README.md +++ b/Packs/Censys/Integrations/CensysV2/README.md @@ -16,6 +16,12 @@ If you are upgrading from a previous of this integration, see [Breaking Changes] | Secret | True | | Trust any certificate (not secure) | False | | Use system proxy settings | False | + | Labels premium feature available | False | + | IP and Domain Malicious labels | False | + | IP and Domain Suspicious labels | False | + | Malicious labels threshold | False | + | Suspicious labels threshold | False | + 4. Click **Test** to validate the URLs, token, and connection. ## Commands @@ -106,9 +112,9 @@ Returns detailed information for an IP address or SHA256 within the specified in | Censys.View.parsed.tbs_fingerprint | String | The SHA2-256 digest over the DER encoding of the certificate's TBSCertificate. | | Censys.View.parsed.tbs_noct_fingerprint | String | The SHA2-256 digest over the DER encoding of the certificate's TBSCertificate with any CT extensions omitted. 
| | Censys.View.parsed.validation_level | String | How the certificate is validated - Domain validated \(DV\), Organization Validated \(OV\), Extended Validation \(EV\), or unknown. | -| Censys.View.parsed.validity.end | Date | Timestamp of when the certificate expires. Timezone is UTC. | +| Censys.View.parsed.validity.end | Date | Timestamp of when the certificate expires. Time zone is UTC. | | Censys.View.parsed.validity.length | Number | The length of time, in seconds, that the certificate is valid. | -| Censys.View.parsed.validity.start | Date | Timestamp of when certificate is first valid. Timezone is UTC. | +| Censys.View.parsed.validity.start | Date | Timestamp of when certificate is first valid. Time zone is UTC. | | Censys.View.parsed.version | Number | The x.509 certificate version number. | | Censys.View.precert | Boolean | Whether the certificate is pre-cert. | | Censys.View.raw | String | The raw certificate. | @@ -445,7 +451,7 @@ Returns detailed information for an IP address or SHA256 within the specified in #### Human Readable Output >### Information for IP 8.8.8.8 ->|ASN|Bgp Prefix|Last Updated|Name|Service| +>|ASN|Routing|Last Updated|Network|Protocols| >|---|---|---|---|---| >| 15169 | 8.8.8.0/24 | 2022-08-30T06:39:12.356Z | GOOGLE | {'Port': 53, 'Service Name': 'DNS'},
{'Port': 443, 'Service Name': 'HTTP'},
{'Port': 853, 'Service Name': 'UNKNOWN'} | @@ -494,8 +500,8 @@ Returns previews of hosts matching a specified search query, or a list of certif | Censys.Search.parsed.issuer.organization | Unknown | The organization name. | | Censys.Search.parsed.names | Unknown | Common names for the entity. | | Censys.Search.parsed.subject_dn | String | Distinguished name of the entity that the certificate belongs to. | -| Censys.Search.parsed.validity.end | Date | Timestamp of when the certificate expires. Timezone is UTC. | -| Censys.Search.parsed.validity.start | Date | Timestamp of when the certificate is first valid. Timezone is UTC. | +| Censys.Search.parsed.validity.end | Date | Timestamp of when the certificate expires. Time zone is UTC. | +| Censys.Search.parsed.validity.start | Date | Timestamp of when the certificate is first valid. Time zone is UTC. | | Censys.Search.parsed.issuer_dn | String | Distinguished name of the entity that has signed and issued the certificate. | @@ -533,7 +539,7 @@ Returns previews of hosts matching a specified search query, or a list of certif #### Human Readable Output >### Search results for query "parsed.issuer.common_name: "Let's Encrypt"" ->|Issuer|Issuer dn|Names|SHA256|Subject dn|Validity| +>|Issuer|Issuer DN|Names|SHA256|Subject DN|Validity| >|---|---|---|---|---|---| >| organization: Let's Encrypt | C=US, O=Let's Encrypt, CN=Let's Encrypt Authority X3 | *.45g4rg43g4fr3434g.gb.net,
45g4rg43g4fr3434g.gb.net | f3ade17dffcadd9532aeb2514f10d66e22941393725aa65366ac286df9b442ec | CN=45g4rg43g4fr3434g.gb.net | start: 2020-10-12T14:46:11Z
end: 2021-01-10T14:46:11Z | @@ -541,3 +547,341 @@ Returns previews of hosts matching a specified search query, or a list of certif ## Additional Considerations for this Version * This version supports API v2 from Censys. * Breaking backward compatibility: The Censys v2 integration does not support *websites* searches. + + +### ip + +*** +Runs reputation on IPs. + +#### Base Command + +`ip` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| ip | IP address or a list of IP addresses to assess reputation. | Required | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| Censys.IP.services.port | Number | The port number associated with the service running on the IP. | +| Censys.IP.services.transport_protocol | String | The transport protocol used by the service running on the IP. | +| Censys.IP.services.extended_service_name | String | The extended name of the service running on the IP. | +| Censys.IP.services.service_name | String | The name of the service running on the IP. | +| Censys.IP.services.certificate | String | The SSL/TLS certificate associated with the service running on the IP. | +| Censys.IP.labels | String | Labels associated with the IP address (with premium access only). | +| Censys.IP.dns.reverse_dns.names | String | Reverse DNS names associated with the IP address. | +| Censys.IP.autonomous_system.country_code | String | The country code of the autonomous system associated with the IP address. | +| Censys.IP.autonomous_system.description | String | Description of the autonomous system associated with the IP address. | +| Censys.IP.autonomous_system.name | String | Name of the autonomous system associated with the IP address. | +| Censys.IP.autonomous_system.bgp_prefix | String | BGP prefix of the autonomous system associated with the IP address. 
| +| Censys.IP.autonomous_system.asn | Number | Autonomous System Number (ASN) of the autonomous system associated with the IP address. | +| Censys.IP.ip | String | The IP address. | +| Censys.IP.location.country | String | Country name of the location associated with the IP address. | +| Censys.IP.location.timezone | String | Time zone of the location associated with the IP address. | +| Censys.IP.location.province | String | Province name of the location associated with the IP address. | +| Censys.IP.location.coordinates.latitude | Number | Latitude coordinate of the location associated with the IP address. | +| Censys.IP.location.coordinates.longitude | Number | Longitude coordinate of the location associated with the IP address. | +| Censys.IP.location.continent | String | Continent name of the location associated with the IP address. | +| Censys.IP.location.postal_code | String | Postal code of the location associated with the IP address. | +| Censys.IP.location.city | String | City name of the location associated with the IP address. | +| Censys.IP.location.country_code | String | Country code of the location associated with the IP address. | +| Censys.IP.last_updated_at | Date | The date and time when the information about the IP address was last updated. | +| IP.Address | unknown | The IP address. | +| IP.ASN | unknown | The IP ASN. | +| IP.Geo.Country | unknown | The IP country. | +| IP.Geo.Location | unknown | The IP location. | +| IP.UpdatedDate | unknown | The date and time of the last update for the IP. | +| IP.Port | unknown | The IP port. | +| DBotScore.Indicator | unknown | The indicator that was tested. | +| DBotScore.Type | unknown | The indicator type. | +| DBotScore.Score | Number | The actual score. | +| DBotScore.Reliability | String | Reliability of the source providing the intelligence data. | +| DBotScore.Vendor | unknown | The vendor used to calculate the score. 
| + +#### Command example + +```!ip ip=8.8.8.8,8.8.4.4``` + +#### Context Example + +```json +{ + "services": [ + { + "port": 53, + "transport_protocol": "UDP", + "extended_service_name": "DNS", + "service_name": "DNS" + }, + { + "certificate": "5a7763efee07b08b18a4af2796bfaac46641a2f15c98e88c3d79fa9a06adfc87", + "extended_service_name": "HTTPS", + "port": 443, + "transport_protocol": "TCP", + "service_name": "HTTP" + }, + { + "service_name": "UNKNOWN", + "transport_protocol": "QUIC", + "extended_service_name": "UNKNOWN", + "port": 443 + }, + { + "transport_protocol": "TCP", + "service_name": "UNKNOWN", + "port": 853, + "certificate": "5a7763efee07b08b18a4af2796bfaac46641a2f15c98e88c3d79fa9a06adfc87", + "extended_service_name": "UNKNOWN" + } + ], + "labels": ["database","email","file-sharing","iot","login-page"], + "dns": { + "reverse_dns": { + "names": [ + "dns.google" + ] + } + }, + "autonomous_system": { + "country_code": "US", + "description": "GOOGLE", + "name": "GOOGLE", + "bgp_prefix": "8.8.8.0/24", + "asn": 15169 + }, + "ip": "8.8.8.8", + "location": { + "country": "United States", + "timezone": "America/Los_Angeles", + "province": "California", + "coordinates": { + "latitude": 37.4056, + "longitude": -122.0775 + }, + "continent": "North America", + "postal_code": "94043", + "city": "Mountain View", + "country_code": "US" + }, + "last_updated_at": "2024-04-07T02:16:23.015Z" +} +``` + +#### Human Readable Output + +>### censys results for IP: 8.8.8.8 +>| **Asn** | **Geo Country** | **Geo Latitude** | **Geo Longitude** | **Ip** | **Port** | **Reputation** | **Updated** | +>| --- | --- | --- | --- | --- | --- | --- | --- | +>| 15169 | United States | 37.4056 | -122.0775 | 8.8.8.8 | 53, 443, 443, 853 | 0 | 2024-04-14T08:03:28.159Z | + + +### domain + +*** +Return all related IPs as relationships. + +#### Base Command + +`domain` + +#### Input + +| **Argument Name** | **Description** | **Required** | +| --- | --- | --- | +| domain | Domain to check. 
| Required | + +#### Context Output + +| **Path** | **Type** | **Description** | +| --- | --- | --- | +| Censys.Domain.location.postal_code | String | The postal code of the location associated with the domain. | +| Censys.Domain.location.province | String | The province name of the location associated with the domain. | +| Censys.Domain.location.country_code | String | The country code of the location associated with the domain. | +| Censys.Domain.location.timezone | String | The time zone of the location associated with the domain. | +| Censys.Domain.location.country | String | The country name of the location associated with the domain. | +| Censys.Domain.location.coordinates.longitude | Number | The longitude coordinate of the location associated with the domain. | +| Censys.Domain.location.coordinates.latitude | Number | The latitude coordinate of the location associated with the domain. | +| Censys.Domain.location.continent | String | The continent name of the location associated with the domain. | +| Censys.Domain.location.city | String | The city name of the location associated with the domain. | +| Censys.Domain.autonomous_system.country_code | String | The country code of the autonomous system associated with the domain. | +| Censys.Domain.autonomous_system.asn | Number | The Autonomous System Number (ASN) associated with the domain. | +| Censys.Domain.autonomous_system.name | String | The name of the autonomous system associated with the domain. | +| Censys.Domain.autonomous_system.bgp_prefix | String | The BGP prefix of the autonomous system associated with the domain. | +| Censys.Domain.autonomous_system.description | String | The description of the autonomous system associated with the domain. | +| Censys.Domain.services.transport_protocol | String | The transport protocol used by the service associated with the domain. | +| Censys.Domain.services.extended_service_name | String | The extended name of the service associated with the domain. 
| +| Censys.Domain.services.port | Number | The port number associated with the service associated with the domain. | +| Censys.Domain.services.service_name | String | The name of the service associated with the domain. | +| Censys.Domain.services.certificate | String | The SSL/TLS certificate associated with the service associated with the domain. | +| Censys.Domain.last_updated_at | Date | The date and time when the information about the domain was last updated. | +| Censys.Domain.ip | String | The IP address associated with the domain. | +| Censys.Domain.dns.reverse_dns.names | String | The reverse DNS names associated with the domain. | +| Domain.Name | String | The domain. | +| Domain.Relationships.EntityA | String | The domain name. | +| Domain.Relationships.EntityAType | String | The entity type. | +| Domain.Relationships.EntityB | String | The entity B. | +| Domain.Relationships.EntityBType | String | The entity B type. | +| Domain.Relationships.Relationship | String | The relationship type. | +| DBotScore.Indicator | unknown | The indicator that was tested. | +| DBotScore.Type | unknown | The indicator type. | +| DBotScore.Score | unknown | The actual score. | +| DBotScore.Vendor | unknown | The vendor used to calculate the score. 
| + +#### Command example + +```!domain domain=amazon.com,facebook.com``` + +#### Context Example + +```json +{ + "code": 200, + "status": "OK", + "result": { + "query": "dns.names=amazon.com", + "total": 3, + "duration": 239, + "hits": [ + { + "location": { + "province": "Virginia", + "country": "United States", + "coordinates": { + "longitude": -77.48749, + "latitude": 39.04372 + }, + "timezone": "America/New_York", + "country_code": "US", + "continent": "North America", + "postal_code": "20147", + "city": "Ashburn" + }, + "autonomous_system": { + "description": "AMAZON-02", + "bgp_prefix": "1.1.1.1", + "name": "AMAZON-02", + "country_code": "US", + "asn": 16509 + }, + "services": [ + { + "port": 80, + "transport_protocol": "TCP", + "service_name": "HTTP", + "extended_service_name": "HTTP" + }, + { + "transport_protocol": "TCP", + "certificate": "XXXXXXX", + "extended_service_name": "HTTPS", + "service_name": "HTTP", + "port": 443 + } + ], + "last_updated_at": "2024-04-06T16:57:13.170Z", + "ip": "1.1.1.1" 
+ }, + { + "ip": "1.1.1.1", + "services": [ + { + "port": 80, + "transport_protocol": "TCP", + "service_name": "HTTP", + "extended_service_name": "HTTP" + }, + { + "port": 443, + "transport_protocol": "TCP", + "extended_service_name": "HTTPS", + "service_name": "HTTP", + "certificate": "XXXXXXX" + } + ], + "dns": { + "reverse_dns": { + "names": [ + "s3-console-us-standard.console.aws.amazon.com" + ] + } + }, + "location": { + "province": "Virginia", + "postal_code": "20147", + "country": "United States", + "timezone": "America/New_York", + "continent": "North America", + "city": "Ashburn", + "country_code": "US", + "coordinates": { + "latitude": 39.04372, + "longitude": -77.48749 + } + }, + "autonomous_system": { + "country_code": "US", + "bgp_prefix": "1.1.1.1", + "asn": 16509, + "description": "AMAZON-02", + "name": "AMAZON-02" + }, + "last_updated_at": "2024-04-06T16:57:13.171Z" + }, + { + "location": { + "postal_code": "20147", + "province": "Virginia", + "country_code": "US", + "timezone": "America/New_York", + "country": "United States", + "coordinates": { + "longitude": -77.48749, + "latitude": 39.04372 + }, + "continent": "North America", + "city": "Ashburn" + }, + "last_updated_at": "2024-04-06T16:57:13.170Z", + "autonomous_system": { + "country_code": "US", + "asn": 16509, + "name": "AMAZON-02", + "bgp_prefix": "1.1.1.1", + "description": "AMAZON-02" + }, + "services": [ + { + "transport_protocol": "TCP", + "extended_service_name": "HTTP", + "port": 80, + "service_name": "HTTP" + }, + { + "extended_service_name": "HTTPS", + "transport_protocol": "TCP", + "certificate": "XXXXXX", + "service_name": "HTTP", + "port": 443 + } + ], + "ip": "1.1.1.1" + } + ], + "links": { + "next": "", + "prev": "" + } + } +} +``` + +#### Human Readable Output + +| **Domain** | +| --- | +| amazon.com | diff --git a/Packs/Censys/Integrations/CensysV2/command_examples.txt b/Packs/Censys/Integrations/CensysV2/command_examples.txt index d8db5bd07b28..faea131898fc 100644 --- 
a/Packs/Censys/Integrations/CensysV2/command_examples.txt +++ b/Packs/Censys/Integrations/CensysV2/command_examples.txt @@ -1,4 +1,6 @@ !cen-search index=certificates query="parsed.issuer.common_name: \"Let's Encrypt\"" limit=1 !cen-view index=ipv4 query=8.8.8.8 !cen-view index=certificates query=9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc -!cen-search index=ipv4 query="services.service_name:HTTP" limit=1 \ No newline at end of file +!cen-search index=ipv4 query="services.service_name:HTTP" limit=1 +!ip ip=8.8.8.8,8.8.4.4 +!domain domain=amazon.com,google.com \ No newline at end of file diff --git a/Packs/Censys/Integrations/CensysV2/test_data/domain_command_response.json b/Packs/Censys/Integrations/CensysV2/test_data/domain_command_response.json new file mode 100644 index 000000000000..2b05bf2c7c27 --- /dev/null +++ b/Packs/Censys/Integrations/CensysV2/test_data/domain_command_response.json @@ -0,0 +1,139 @@ +{ + "code": 200, + "status": "OK", + "result": { + "query": "dns.names=amazon.com", + "total": 3, + "duration": 239, + "hits": [ + { + "location": { + "province": "Virginia", + "country": "United States", + "coordinates": { + "longitude": -77.48749, + "latitude": 39.04372 + }, + "timezone": "America/New_York", + "country_code": "US", + "continent": "North America", + "postal_code": "20147", + "city": "Ashburn" + }, + "autonomous_system": { + "description": "AMAZON-02", + "bgp_prefix": "1.1.1.1", + "name": "AMAZON-02", + "country_code": "US", + "asn": 16509 + }, + "services": [ + { + "port": 80, + "transport_protocol": "TCP", + "service_name": "HTTP", + "extended_service_name": "HTTP" + }, + { + "transport_protocol": "TCP", + "certificate": "XXXXXX", + "extended_service_name": "HTTPS", + "service_name": "HTTP", + "port": 443 + } + ], + "last_updated_at": "2024-04-06T16:57:13.170Z", + "ip": "1.1.1.1" + }, + { + "ip": "8.8.8.8", + "services": [ + { + "port": 80, + "transport_protocol": "TCP", + "service_name": "HTTP", + 
"extended_service_name": "HTTP" + }, + { + "port": 443, + "transport_protocol": "TCP", + "extended_service_name": "HTTPS", + "service_name": "HTTP", + "certificate": "XXXXXX" + } + ], + "dns": { + "reverse_dns": { + "names": [ + "s3-console-us-standard.console.aws.amazon.com" + ] + } + }, + "location": { + "province": "Virginia", + "postal_code": "20147", + "country": "United States", + "timezone": "America/New_York", + "continent": "North America", + "city": "Ashburn", + "country_code": "US", + "coordinates": { + "latitude": 39.04372, + "longitude": -77.48749 + } + }, + "autonomous_system": { + "country_code": "US", + "bgp_prefix": "1.1.1.1", + "asn": 16509, + "description": "AMAZON-02", + "name": "AMAZON-02" + }, + "last_updated_at": "2024-04-06T16:57:13.171Z" + }, + { + "location": { + "postal_code": "20147", + "province": "Virginia", + "country_code": "US", + "timezone": "America/New_York", + "country": "United States", + "coordinates": { + "longitude": -77.48749, + "latitude": 39.04372 + }, + "continent": "North America", + "city": "Ashburn" + }, + "last_updated_at": "2024-04-06T16:57:13.170Z", + "autonomous_system": { + "country_code": "US", + "asn": 16509, + "name": "AMAZON-02", + "bgp_prefix": "1.1.1.1", + "description": "AMAZON-02" + }, + "services": [ + { + "transport_protocol": "TCP", + "extended_service_name": "HTTP", + "port": 80, + "service_name": "HTTP" + }, + { + "extended_service_name": "HTTPS", + "transport_protocol": "TCP", + "certificate": "XXXXXX", + "service_name": "HTTP", + "port": 443 + } + ], + "ip": "8.8.4.4" + } + ], + "links": { + "next": "", + "prev": "" + } + } + } \ No newline at end of file diff --git a/Packs/Censys/Integrations/CensysV2/test_data/ip_command_response.json b/Packs/Censys/Integrations/CensysV2/test_data/ip_command_response.json new file mode 100644 index 000000000000..fd0483f9f5ee --- /dev/null +++ b/Packs/Censys/Integrations/CensysV2/test_data/ip_command_response.json @@ -0,0 +1,75 @@ +{ + "code": 200, + "status": 
"OK", + "result": { + "query": "ip=8.8.8.8", + "total": 1, + "duration": 266, + "hits": [ + { + "services": [ + { + "port": 53, + "transport_protocol": "UDP", + "extended_service_name": "DNS", + "service_name": "DNS" + }, + { + "certificate": "XXXXXXX", + "extended_service_name": "HTTPS", + "port": 443, + "transport_protocol": "TCP", + "service_name": "HTTP" + }, + { + "service_name": "UNKNOWN", + "transport_protocol": "QUIC", + "extended_service_name": "UNKNOWN", + "port": 443 + }, + { + "transport_protocol": "TCP", + "service_name": "UNKNOWN", + "port": 853, + "certificate": "XXXXXXX", + "extended_service_name": "UNKNOWN" + } + ], + "labels": ["database","email","file-sharing","iot","login-page"], + "dns": { + "reverse_dns": { + "names": [ + "dns.google" + ] + } + }, + "autonomous_system": { + "country_code": "US", + "description": "GOOGLE", + "name": "GOOGLE", + "bgp_prefix": "8.8.8.8", + "asn": 15169 + }, + "ip": "8.8.8.8", + "location": { + "country": "United States", + "timezone": "America/Los_Angeles", + "province": "California", + "coordinates": { + "latitude": 37.4056, + "longitude": -122.0775 + }, + "continent": "North America", + "postal_code": "94043", + "city": "Mountain View", + "country_code": "US" + }, + "last_updated_at": "2024-04-07T02:16:23.015Z" + } + ], + "links": { + "next": "", + "prev": "" + } + } + } \ No newline at end of file diff --git a/Packs/Censys/Integrations/CensysV2/test_data/search_certs_response.json b/Packs/Censys/Integrations/CensysV2/test_data/search_certs_response.json index 04cd6977db54..e293489ed2cb 100644 --- a/Packs/Censys/Integrations/CensysV2/test_data/search_certs_response.json +++ b/Packs/Censys/Integrations/CensysV2/test_data/search_certs_response.json @@ -1,25 +1,765 @@ { - "results": [ - { - "parsed": { - "fingerprint_sha256": "f3ade17dffcadd9532aeb2514f10d66e22941393725aa65366ac286df9b41234", - "issuer": { - "organization": [ - "Let's Encrypt" - ] - }, - "issuer_dn": "C=US, O=Let's Encrypt, CN=Let's Encrypt 
Authority X3", - "names": [ - "*.45g4rg43g4fr3434g.gb.net", - "45g4rg43g4fr3434g.gb.net" - ], - "subject_dn": "CN=45g4rg43g4fr3434g.gb.net", - "validity": { - "end": "2021-01-10T14:46:11Z", - "start": "2020-10-12T14:46:11Z" - } + "code": 200, + "status": "OK", + "result": { + "query": "", + "total": 10235172181.0, + "duration_ms": 13820, + "hits": [ + { + "names": [ + "my-house-vtpvbznpmk.dynamic-m.com" + ], + "parsed": { + "validity_period": { + "not_after": "2024-07-03T13:17:43Z", + "not_before": "2024-04-04T13:18:43Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=my-house-vtpvbznpmk.dynamic-m.com" + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "marmaris-tpgpbkkjnd.dynamic-m.com" + ], + "parsed": { + "validity_period": { + "not_after": "2024-07-03T13:17:43Z", + "not_before": "2024-04-04T13:18:43Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=marmaris-tpgpbkkjnd.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + } + }, + { + "names": [ + "imperva.com", + "www.spiralnotebookproject.com" + ], + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_after": "2024-04-05T13:15:56Z", + "not_before": "2024-04-04T13:15:56Z" + } + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "validity_period": { + "not_after": "2024-07-03T13:14:27Z", + "not_before": "2024-04-04T13:15:27Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=eagle-farm-oob-gthgqzvdpp.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + }, + "names": [ + "eagle-farm-oob-gthgqzvdpp.dynamic-m.com" + 
], + "fingerprint_sha256": "4f1f0eb6471144e1ef22fec6fecf42b737d658681a87d448ef80baf660762f8b" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "*.milgam-evedge.co.il", + "imperva.com" + ], + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:15:02Z", + "not_after": "2024-04-05T13:15:02Z" + }, + "subject_dn": "CN=imperva.com" + } + }, + { + "names": [ + "quintus-firewall-se-wvdtvgdmjt.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=quintus-firewall-se-wvdtvgdmjt.dynamic-m.com", + "validity_period": { + "not_before": "2024-04-04T13:14:58Z", + "not_after": "2024-07-03T13:13:58Z" + } + } + }, + { + "names": [ + "quintus-firewall-se-wvdtvgdmjt.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=quintus-firewall-se-wvdtvgdmjt.dynamic-m.com", + "validity_period": { + "not_after": "2024-07-03T13:13:58Z", + "not_before": "2024-04-04T13:14:58Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + } + }, + { + "parsed": { + "validity_period": { + "not_before": "2024-04-04T13:14:38Z", + "not_after": "2024-07-03T13:13:38Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=mx-gkhpkpmhdg.dynamic-m.com" + }, + "names": [ + "mx-gkhpkpmhdg.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=centor-eagle-farm-wired-jgkmgqntpp.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID 
Trusted Certificate Service, CN=HydrantID Server CA O1", + "validity_period": { + "not_before": "2024-04-04T13:13:57Z", + "not_after": "2024-07-03T13:12:57Z" + } + }, + "names": [ + "centor-eagle-farm-wired-jgkmgqntpp.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "fsyr-cwrqddtzpj.dynamic-m.com" + ], + "parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "validity_period": { + "not_after": "2024-07-03T13:12:57Z", + "not_before": "2024-04-04T13:13:57Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=fsyr-cwrqddtzpj.dynamic-m.com" + } + }, + { + "names": [ + "tes-hemi-hczjhtknnm.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "validity_period": { + "not_after": "2024-07-03T13:12:46Z", + "not_before": "2024-04-04T13:13:46Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=tes-hemi-hczjhtknnm.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + } + }, + { + "names": [ + "*.bitcopai.com", + "*.tempobet.com", + "*.topopsiyon.com", + "963tempobet.com", + "964tempobet.com", + "965tempobet.com", + "966tempobet.com", + "967tempobet.com", + "968tempobet.com", + "bitcopai.com", + "imperva.com", + "tempobet.com", + "topopsiyon.com", + "www.963tempobet.com", + "www.964tempobet.com", + "www.965tempobet.com", + "www.966tempobet.com", + "www.967tempobet.com", + "www.968tempobet.com", + "www.969tempobet.com" + ], + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_before": "2024-04-04T13:13:44Z", + "not_after": "2024-10-01T13:13:44Z" + } + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "paso-robles-qrkptzngkg.dynamic-m.com" + ], + 
"parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "validity_period": { + "not_before": "2024-04-04T13:13:38Z", + "not_after": "2024-07-03T13:12:38Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=paso-robles-qrkptzngkg.dynamic-m.com" + } + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "CN=www.nrpotolok.ru", + "validity_period": { + "not_before": "2024-04-04T13:13:25Z", + "not_after": "2024-11-04T13:13:25Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign GCC R3 DV TLS CA 2020" + }, + "names": [ + "autodiscover.nrpotolok.ru", + "mail.nrpotolok.ru", + "nrpotolok.ru", + "owa.nrpotolok.ru", + "www.nrpotolok.ru" + ] + }, + { + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_after": "2024-04-05T13:12:57Z", + "not_before": "2024-04-04T13:12:57Z" + } + }, + "names": [ + "imperva.com", + "www.brtexpress.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:12:56Z", + "not_after": "2024-04-05T13:12:56Z" + } + }, + "names": [ + "imperva.com", + "www.searchskate.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "names": [ + "*.lns-privatelabel.com", + "imperva.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:12:56Z", + "not_after": "2024-04-05T13:12:56Z" + } + } + }, + { + "names": [ + "imperva.com", + "www.johnkapengapainting.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign 
nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:12:56Z", + "not_after": "2024-04-05T13:12:56Z" + } + } + }, + { + "names": [ + "imperva.com", + "scopriofferta.vodafone.it" + ], + "parsed": { + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_after": "2024-04-05T13:12:56Z", + "not_before": "2024-04-04T13:12:56Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1" + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:12:56Z", + "not_after": "2024-04-05T13:12:56Z" + }, + "subject_dn": "CN=imperva.com" + }, + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "imperva.com", + "www.expresslanetransports.com" + ] + }, + { + "parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=neu-cdfmgmtvpn-pkbrhkcwng.dynamic-m.com", + "validity_period": { + "not_before": "2024-04-04T13:12:44Z", + "not_after": "2024-07-03T13:11:44Z" + } + }, + "names": [ + "neu-cdfmgmtvpn-pkbrhkcwng.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "names": [ + "neu-cdfmgmtvpn-pkbrhkcwng.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=neu-cdfmgmtvpn-pkbrhkcwng.dynamic-m.com", + "validity_period": { + "not_before": "2024-04-04T13:12:42Z", + "not_after": "2024-07-03T13:11:42Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + } + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "validity_period": { + "not_before": "2024-04-04T13:12:38Z", + "not_after": "2024-07-03T13:11:38Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted 
Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=my-house-vtpvbznpmk.dynamic-m.com" + }, + "names": [ + "my-house-vtpvbznpmk.dynamic-m.com" + ] + }, + { + "names": [ + "infranet-rwi-mx84-dvcbcbrtcc.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=infranet-rwi-mx84-dvcbcbrtcc.dynamic-m.com", + "validity_period": { + "not_before": "2024-04-04T13:12:12Z", + "not_after": "2024-07-03T13:11:12Z" + } + } + }, + { + "parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "validity_period": { + "not_after": "2024-07-03T13:11:12Z", + "not_before": "2024-04-04T13:12:12Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=infranet-rwi-mx84-dvcbcbrtcc.dynamic-m.com" + }, + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "infranet-rwi-mx84-dvcbcbrtcc.dynamic-m.com" + ] + }, + { + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_after": "2024-04-05T13:11:50Z", + "not_before": "2024-04-04T13:11:56Z" + } + }, + "names": [ + "imperva.com", + "www.gifsoutloud.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_before": "2024-04-04T13:11:50Z", + "not_after": "2024-04-05T13:11:50Z" + } + }, + "names": [ + "imperva.com", + "www.blacksuperheroesmatter.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "names": [ + "imperva.com", + "www.ladagroupgh.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "CN=imperva.com", + 
"issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_after": "2024-04-05T13:11:50Z", + "not_before": "2024-04-04T13:11:50Z" + } + } + }, + { + "names": [ + "imperva.com", + "www.splashnewmedia.net" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:11:50Z", + "not_after": "2024-04-05T13:11:50Z" + }, + "subject_dn": "CN=imperva.com" + } + }, + { + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_after": "2024-04-05T13:11:50Z", + "not_before": "2024-04-04T13:11:50Z" + } + }, + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "imperva.com", + "www.tropicsminigolf.com" + ] + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "validity_period": { + "not_after": "2024-04-05T13:11:50Z", + "not_before": "2024-04-04T13:11:50Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com" + }, + "names": [ + "imperva.com", + "www.oscodarockfest.org" + ] + }, + { + "parsed": { + "validity_period": { + "not_before": "2024-04-04T13:11:50Z", + "not_after": "2024-04-05T13:11:50Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com" + }, + "names": [ + "*.integration-cb4x.fr", + "imperva.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "names": [ + "*.sitecreateundersubaccount1712236065550.incaptest.co", + "incaptest.co" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "CN=incaptest.co", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_after": "2024-10-01T13:11:45Z", + "not_before": "2024-04-04T13:11:45Z" + } + } + }, + { + "parsed": { + 
"subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=sjc17-2-wired-rqdcvrrppk.dynamic-m.com", + "validity_period": { + "not_after": "2024-07-03T13:10:45Z", + "not_before": "2024-04-04T13:11:45Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + }, + "names": [ + "sjc17-2-wired-rqdcvrrppk.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "names": [ + "acima-minuteman-wired-wpwdkjgtjq.dynamic-m.com" + ], + "parsed": { + "validity_period": { + "not_after": "2024-07-03T13:10:42Z", + "not_before": "2024-04-04T13:11:42Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=acima-minuteman-wired-wpwdkjgtjq.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:11:27Z", + "not_after": "2024-04-05T13:11:01Z" + }, + "subject_dn": "CN=imperva.com" + }, + "names": [ + "creditemaibune.md", + "imperva.com" + ] + }, + { + "parsed": { + "validity_period": { + "not_after": "2024-04-05T13:11:00Z", + "not_before": "2024-04-04T13:11:27Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com" + }, + "names": [ + "*.maib.md", + "imperva.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "*.leasing.md", + "imperva.com" + ], + "parsed": { + "validity_period": { + "not_after": "2024-04-05T13:11:00Z", + "not_before": "2024-04-04T13:11:27Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com" + } + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "imperva.com", + 
"libercard.md" + ], + "parsed": { + "validity_period": { + "not_before": "2024-04-04T13:11:27Z", + "not_after": "2024-04-05T13:11:01Z" + }, + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1" + } + }, + { + "names": [ + "*.alto.md", + "imperva.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:11:27Z", + "not_after": "2024-04-05T13:11:00Z" + } + } + }, + { + "parsed": { + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:11:27Z", + "not_after": "2024-04-05T13:11:00Z" + } + }, + "names": [ + "imperva.com", + "leasing.md" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "validity_period": { + "not_after": "2024-07-03T13:10:27Z", + "not_before": "2024-04-04T13:11:27Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=centor-strykow-mzkrvkgdpp.dynamic-m.com" + }, + "names": [ + "centor-strykow-mzkrvkgdpp.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_before": "2024-04-04T13:11:26Z", + "not_after": "2024-04-05T13:11:00Z" } + }, + "names": [ + "alto.md", + "imperva.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "validity_period": { + "not_before": "2024-04-04T13:11:12Z", + "not_after": "2024-07-03T13:10:12Z" + }, + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1", + "subject_dn": "C=US, ST=California, 
L=San Jose, O=Cisco Systems Inc., CN=centor-strykow-mzkrvkgdpp.dynamic-m.com" + }, + "names": [ + "centor-strykow-mzkrvkgdpp.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + "validity_period": { + "not_after": "2024-04-05T13:10:59Z", + "not_before": "2024-04-04T13:11:07Z" + }, + "subject_dn": "CN=imperva.com", + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1" + }, + "fingerprint_sha256": "XXXXXXXXX", + "names": [ + "casahub.md", + "imperva.com" + ] + }, + { + "names": [ + "*.casahub.md", + "imperva.com" + ], + "parsed": { + "validity_period": { + "not_after": "2024-04-05T13:10:59Z", + "not_before": "2024-04-04T13:11:06Z" + }, + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com" + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "validity_period": { + "not_before": "2024-04-04T13:11:06Z", + "not_after": "2024-04-05T13:10:59Z" + }, + "subject_dn": "CN=imperva.com" + }, + "names": [ + "imperva.com", + "maibpay.md" + ] + }, + { + "names": [ + "*.maibpay.md", + "imperva.com" + ], + "fingerprint_sha256": "XXXXXXXXX", + "parsed": { + "issuer_dn": "C=BE, O=GlobalSign nv-sa, CN=GlobalSign Atlas R3 DV TLS CA 2024 Q1", + "subject_dn": "CN=imperva.com", + "validity_period": { + "not_after": "2024-04-05T13:10:59Z", + "not_before": "2024-04-04T13:11:05Z" + } + } + }, + { + "names": [ + "usswan-ddjjkpcnbm.dynamic-m.com" + ], + "parsed": { + "validity_period": { + "not_before": "2024-04-04T13:10:58Z", + "not_after": "2024-07-03T13:09:58Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=usswan-ddjjkpcnbm.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + }, + "fingerprint_sha256": "XXXXXXXXX" + }, + { + "parsed": { + 
"validity_period": { + "not_before": "2024-04-04T13:10:45Z", + "not_after": "2024-07-03T13:09:45Z" + }, + "subject_dn": "C=US, ST=California, L=San Jose, O=Cisco Systems Inc., CN=lanaseg-wired-gnmvjjwjrv.dynamic-m.com", + "issuer_dn": "C=US, O=IdenTrust, OU=HydrantID Trusted Certificate Service, CN=HydrantID Server CA O1" + }, + "names": [ + "lanaseg-wired-gnmvjjwjrv.dynamic-m.com" + ], + "fingerprint_sha256": "XXXXXXXXX" } - ], - "status": "ok" -} \ No newline at end of file + ], + "links": { + "next": "eyJhbGciOiJFZERTQSJ9.eyJub25jZSI6InhNanFpYlhyRWx5Y29aTFkwNjhqK0N2K005SFhuTTQvU28rT0RYQnE1elkiLCJwYWdlIjoyLCJyZXZlcnNlZCI6ZmFsc2UsInNlYXJjaF9hZnRlciI6WzEuMCwxLDE3MTIyMzYyNDUwMDAsIjQyOGUxNmJmMTQ2ZGZhODg0ZWZkNTQyMzdjYzNjNTA3MGQxMzQ1MjdjZWQ5NDQ1ZDI2MjBjOTU4YzgzYzM1YmQiXSwic29ydCI6W3siX3Njb3JlIjp7Im9yZGVyIjoiZGVzYyJ9fSx7InZhbGlkYXRpb24uY2hyb21lLmlzX3ZhbGlkIjp7Im1pc3NpbmciOiJfbGFzdCIsIm1vZGUiOiJtaW4iLCJvcmRlciI6ImRlc2MifX0seyJwYXJzZWQudmFsaWRpdHlfcGVyaW9kLm5vdF9iZWZvcmUiOnsibWlzc2luZyI6Il9sYXN0IiwibW9kZSI6Im1pbiIsIm9yZGVyIjoiZGVzYyJ9fSx7ImZpbmdlcnByaW50X3NoYTI1Ni5fX3JhdyI6eyJtaXNzaW5nIjoiX2xhc3QiLCJtb2RlIjoibWluIiwib3JkZXIiOiJhc2MifX1dLCJ2ZXJzaW9uIjoxfQ.OzbkkI7IB3gKrKfipdX1XQ0NYJF7dTAB5fBe17VRwflSQ4pW2bF0rf45vaOqbjqdYX7gu9TPGwSv9bCvRI6GDw", + "prev": "" + } + } + } \ No newline at end of file diff --git a/Packs/Censys/Integrations/CensysV2/test_data/search_host_response.json b/Packs/Censys/Integrations/CensysV2/test_data/search_host_response.json index 55dc578f158f..bd675e8e9728 100644 --- a/Packs/Censys/Integrations/CensysV2/test_data/search_host_response.json +++ b/Packs/Censys/Integrations/CensysV2/test_data/search_host_response.json @@ -1,48 +1,91 @@ { "code": 200, + "status": "OK", "result": { - "hits": [ + "query": "8.8.4.4", + "total": 3139, + "duration": 361, + "hits": [ + { + "services": [ { - "autonomous_system": { - "asn": 13335, - "bgp_prefix": "1.0.0.0/24", - "country_code": "US", - "description": "CLOUDFLARENET", - "name": "CLOUDFLARENET" - }, - "ip": 
"1.0.0.0", - "location": { - "continent": "Oceania", - "coordinates": { - "latitude": -33.494, - "longitude": 143.2104 - }, - "country": "Australia", - "country_code": "AU", - "registered_country": "Australia", - "registered_country_code": "AU", - "timezone": "Australia/Sydney" - }, - "services": [ - { - "port": 80, - "service_name": "HTTP", - "transport_protocol": "TCP" - }, - { - "port": 443, - "service_name": "HTTP", - "transport_protocol": "TCP" - } - ] + "service_name": "DCERPC", + "extended_service_name": "DCERPC", + "transport_protocol": "TCP", + "port": 135 + }, + { + "port": 139, + "extended_service_name": "NETBIOS", + "service_name": "NETBIOS", + "transport_protocol": "TCP" + }, + { + "service_name": "SMB", + "port": 445, + "extended_service_name": "SMB", + "transport_protocol": "TCP" + }, + { + "certificate": "XXXXXX", + "extended_service_name": "RDP", + "port": 3389, + "transport_protocol": "TCP", + "service_name": "RDP" + }, + { + "service_name": "HTTP", + "port": 5985, + "transport_protocol": "TCP", + "extended_service_name": "HTTP" + }, + { + "service_name": "HTTP", + "port": 47001, + "extended_service_name": "HTTP", + "transport_protocol": "TCP" } - ], - "links": { - "next": "eyJBZnRlciI6WyIxLjAuMC40OSIsIlx1MDAzY25pbFx1MDAzZSIsIjAuMzk4MDY2NTUiXSwiUmV2ZXJzZSI6ZmFsc2V9", - "prev": "" - }, - "query": "services.service_name:HTTP", - "total": 182406783 - }, - "status": "OK" -} \ No newline at end of file + ], + "autonomous_system": { + "name": "ROSTELECOM-AS", + "bgp_prefix": "1.1.1.1", + "country_code": "RU", + "asn": 12389, + "description": "ROSTELECOM-AS" + }, + "ip": "1.1.1.1", + "location": { + "country": "Russia", + "province": "Moscow Oblast", + "country_code": "RU", + "timezone": "Europe/Moscow", + "coordinates": { + "longitude": 37.42848, + "latitude": 55.9001 + }, + "city": "Khimki", + "continent": "Europe", + "postal_code": "141400" + }, + "operating_system": { + "other": [ + + ], + "component_uniform_resource_identifiers": [ + + ], + "cpe": 
"cpe:2.3:o:microsoft:windows:*:*:*:*:*:*:*:*", + "vendor": "microsoft", + "part": "o", + "product": "windows", + "source": "OSI_TRANSPORT_LAYER" + }, + "last_updated_at": "2024-04-04T07:46:53.795Z" + } + ], + "links": { + "next": "eyJhbGciOiJFZERTQSJ9.eyJub25jZSI6IlpzTFJrR3hsYm1GTjF2VS9LNHFUUHNTMDNNK3A0TUR4NmVrZjZQaWpiU3MiLCJwYWdlIjoyLCJyZXZlcnNlZCI6ZmFsc2UsInNlYXJjaF9hZnRlciI6WzUyLjY2ODQsMTcxMjIxNjgxMzc5NSwiMTg4LjI1NC4wLjkiLG51bGxdLCJzb3J0IjpbeyJfc2NvcmUiOnsib3JkZXIiOiJkZXNjIn19LHsibGFzdF91cGRhdGVkX2F0Ijp7Im1pc3NpbmciOiJfbGFzdCIsIm1vZGUiOiJtaW4iLCJvcmRlciI6ImRlc2MifX0seyJpcCI6eyJtaXNzaW5nIjoiX2xhc3QiLCJtb2RlIjoibWluIiwib3JkZXIiOiJhc2MifX0seyJuYW1lLl9fcmF3Ijp7Im1pc3NpbmciOiJfbGFzdCIsIm1vZGUiOiJtaW4iLCJvcmRlciI6ImFzYyJ9fV0sInZlcnNpb24iOjF9.73VnbstzQPRlyCF2NXFno_YZiHBDuMDshc8Yq7PNSw6Ozuaz596TDSyzBd6hUt4v3chM_97zNmf3x0ey_A5IDQ", + "prev": "" + } + } + } \ No newline at end of file diff --git a/Packs/Censys/Integrations/CensysV2/test_data/view_cert_response.json b/Packs/Censys/Integrations/CensysV2/test_data/view_cert_response.json index dcbd33f4d2d9..76b4ecfef4d8 100644 --- a/Packs/Censys/Integrations/CensysV2/test_data/view_cert_response.json +++ b/Packs/Censys/Integrations/CensysV2/test_data/view_cert_response.json @@ -1,248 +1,424 @@ { - "ct": { - "digicert_ct1": { - "added_to_ct_at": "2015-09-29T19:55:46.232Z", - "ct_to_censys_at": "2018-07-30T04:49:40.404877527Z", - "index": 165790 + "code": 200, + "status": "OK", + "result": { + "_encoding": { + "fingerprint_sha256": "DISPLAY_HEX", + "fingerprint_sha1": "DISPLAY_HEX", + "fingerprint_md5": "DISPLAY_HEX", + "tbs_fingerprint_sha256": "DISPLAY_HEX", + "tbs_no_ct_fingerprint_sha256": "DISPLAY_HEX", + "spki_fingerprint_sha256": "DISPLAY_HEX", + "parent_spki_fingerprint_sha256": "DISPLAY_HEX", + "raw": "DISPLAY_BASE64", + "spki_subject_fingerprint_sha256": "DISPLAY_HEX", + "parent_spki_subject_fingerprint_sha256": "DISPLAY_HEX" + }, + "fingerprint_sha256": "XXXXXX", + "fingerprint_sha1": "XXXXXXX", + "fingerprint_md5": 
"XXXXXXX", + "tbs_fingerprint_sha256": "XXXXXXX", + "tbs_no_ct_fingerprint_sha256": "XXXXXX", + "spki_fingerprint_sha256": "XXXXXXX", + "parent_spki_fingerprint_sha256": "XXXXXXX", + "parsed": { + "version": 3, + "serial_number": "190655745167071791176803125260949407349", + "issuer_dn": "C=US, O=Google Trust Services LLC, CN=GTS CA 1C3", + "issuer": { + "common_name": [ + "GTS CA 1C3" + ], + "country": [ + "US" + ], + "organization": [ + "Google Trust Services LLC" + ] }, - "google_aviator": { - "added_to_ct_at": "1970-01-01T00:00:00Z", - "ct_to_censys_at": "1970-01-01T00:00:00Z", - "index": 8713649 + "subject_dn": "CN=dns.google", + "subject": { + "common_name": [ + "dns.google" + ] }, - "google_pilot": { - "added_to_ct_at": "2015-09-29T19:55:45.785Z", - "ct_to_censys_at": "2018-07-30T15:23:48.617288146Z", - "index": 9498499 + "subject_key_info": { + "key_algorithm": { + "name": "RSA", + "oid": "1.2.840.113549.1.1.1" + }, + "rsa": { + "exponent": 65537, + "_encoding": { + "modulus": "DISPLAY_HEX" + }, + "modulus": "aeddd3fdd3529e2d2ad2718a63c37e589b0df49c89528676fd961ac4f6c0d9ebea6fd97aaf1dd351d666ff731400e3923b886b25d1dbfce0cae6349fba4c576ef2b788f7dab079f0903db2602f23decddde8d403ca5367589207c20593d858546a27e2d6343d8dc85c88d303f9c9722cd150948e7493117e7691e25d6ed7c8339c2864620bdceb41e6a79a6e817376936235489241f3e5938534741ceb17a0aaeb8a1bd15eba2980337ea9b660139fa4f386ab6620265344258c65a924b4b4bbb2542a96800ec7a6e461ec1e9c89351830f123a89087c0905aa6e08baf77dacf205183ba497483d9096e2eb648e99007c92dae32c68ca41004eca30b61ff3f97", + "length": 2048 + }, + "_encoding": { + "fingerprint_sha256": "DISPLAY_HEX" + }, + "fingerprint_sha256": "XXXXXX", + "_key": "rsa" }, - "google_rocketeer": { - "added_to_ct_at": "2015-09-29T19:55:45.704Z", - "ct_to_censys_at": "2018-07-30T15:17:12.304312851Z", - "index": 6663198 + "validity_period": { + "not_before": "2023-02-08T04:36:32Z", + "not_after": "2023-05-03T04:36:31Z", + "length_seconds": 7257599 }, - "nordu_ct_plausible": { - 
"added_to_ct_at": "2015-10-19T23:17:33.123Z", - "ct_to_censys_at": "2018-07-30T19:53:59.431410229Z", - "index": 5744025 + "signature": { + "signature_algorithm": { + "name": "SHA256-RSA", + "oid": "1.2.840.113549.1.1.11" + }, + "_encoding": { + "value": "DISPLAY_HEX" + }, + "value": "d86194d2afb08af2b382af951338b39a55a5ffc0ebbe679d9f72e7972107616a0f4a66157d3fa82463355b6b8b311e52b628ab40f88aa7ba6e51f3c8ae7af380f7c27b23e4145f8d1079d670b147217f5dad90f8d4c698b11062764041819b5369c62020377444fdbf46e3b6b27f8f0e37026f65a9597abf5414a66ba4e19d46f924bef660ab86bdc5a1fb99976600816be0f909b943c1fa33288a1b75ec0b5b32bda0eaa455ea2e7c83a6238ed737697db80de6672ca5856e53fbf0f74fb99b79920404a85f923888cce29ea41a244cf77cbce2fcf098ef77a5c87c6860fb8b5df54aa71111acd22170bcbeabb4dd4bd7e9e6be3430505035ca4e2751142665", + "valid": true, + "self_signed": false }, - "symantec_ws_ct": { - "added_to_ct_at": "2015-09-29T19:55:46.004Z", - "ct_to_censys_at": "2018-07-30T04:22:53.736190633Z", - "index": 6913 - } - }, - "fingerprint_sha256": "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f612345", - "metadata": { - "added_at": "1970-01-01T00:00:00Z", - "parse_status": "success", - "parse_version": 1, - "post_processed": true, - "post_processed_at": "2021-06-22T01:40:32Z", - "seen_in_scan": true, - "source": "scan", - "updated_at": "2021-06-22T03:28:34Z" - }, - "parent_spki_subject_fingerprint": "ec0c72ce7689150e4f62d04f51f0f19713f77cf27ff43cab4035e9e54e812345", - "parsed": { "extensions": { - "authority_info_access": { - "issuer_urls": [ - "http://pki.google.com/GIAG2.crt" - ], - "ocsp_urls": [ - "http://clients1.google.com/ocsp" - ] + "key_usage": { + "digital_signature": true, + "key_encipherment": true, + "value": 5, + "content_commitment": false, + "data_encipherment": false, + "key_agreement": false, + "certificate_sign": false, + "crl_sign": false, + "encipher_only": false, + "decipher_only": false + }, + "basic_constraints": { + "is_ca": false + }, + "subject_alt_name": { + "dns_names": 
[ + "dns.google", + "dns.google.com", + "*.dns.google.com", + "8888.google", + "dns64.dns.google" + ], + "ip_addresses": [ + "8.8.8.8", + "8.8.4.4", + "1.1.1.1", + "1.1.1.1", + "1.1.1.1", + "1.1.1.1" + ] + }, + "crl_distribution_points": [ + "XXXXXX" + ], + "_encoding": { + "authority_key_id": "DISPLAY_HEX", + "subject_key_id": "DISPLAY_HEX" + }, + "authority_key_id": "XXXXXX", + "subject_key_id": "XXXXXXX", + "extended_key_usage": { + "server_auth": true, + "apple_code_signing": false, + "apple_code_signing_development": false, + "apple_software_update_signing": false, + "apple_code_signing_third_party": false, + "apple_resource_signing": false, + "apple_ichat_signing": false, + "apple_ichat_encryption": false, + "apple_system_identity": false, + "apple_crypto_env": false, + "apple_crypto_production_env": false, + "apple_crypto_maintenance_env": false, + "apple_crypto_test_env": false, + "apple_crypto_development_env": false, + "apple_crypto_qos": false, + "apple_crypto_tier0_qos": false, + "apple_crypto_tier1_qos": false, + "apple_crypto_tier2_qos": false, + "apple_crypto_tier3_qos": false, + "microsoft_cert_trust_list_signing": false, + "microsoft_qualified_subordinate": false, + "microsoft_key_recovery_3": false, + "microsoft_document_signing": false, + "microsoft_lifetime_signing": false, + "microsoft_mobile_device_software": false, + "microsoft_smart_display": false, + "microsoft_csp_signature": false, + "microsoft_timestamp_signing": false, + "microsoft_server_gated_crypto": false, + "microsoft_sgc_serialized": false, + "microsoft_encrypted_file_system": false, + "microsoft_efs_recovery": false, + "microsoft_whql_crypto": false, + "microsoft_nt5_crypto": false, + "microsoft_oem_whql_crypto": false, + "microsoft_embedded_nt_crypto": false, + "microsoft_root_list_signer": false, + "microsoft_drm": false, + "microsoft_drm_individualization": false, + "microsoft_licenses": false, + "microsoft_license_server": false, + "microsoft_enrollment_agent": false, + 
"microsoft_smartcard_logon": false, + "microsoft_ca_exchange": false, + "microsoft_key_recovery_21": false, + "microsoft_system_health": false, + "microsoft_system_health_loophole": false, + "microsoft_kernel_mode_code_signing": false, + "dvcs": false, + "sbgp_cert_aa_service_auth": false, + "eap_over_ppp": false, + "eap_over_lan": false, + "client_auth": false, + "code_signing": false, + "email_protection": false, + "ipsec_end_system": false, + "ipsec_tunnel": false, + "ipsec_user": false, + "time_stamping": false, + "ocsp_signing": false, + "ipsec_intermediate_system_usage": false, + "netscape_server_gated_crypto": false, + "any": false + }, + "certificate_policies": [ + { + "id": "1.2.3.4.5" }, - "authority_key_id": "4add06161bbcf668b576f581b6bb621aba5a1234", - "basic_constraints": { - "is_ca": false + { + "id": "1.2.3.4.5" + } + ], + "authority_info_access": { + "ocsp_urls": [ + "XXXXXX" + ], + "issuer_urls": [ + "XXXXXX" + ] + }, + "signed_certificate_timestamps": [ + { + "_encoding": { + "log_id": "DISPLAY_HEX" + }, + "log_id": "XXXXXX", + "timestamp": "2023-02-08T05:36:33Z", + "signature": { + "hash_algorithm": "SHA256", + "signature_algorithm": "ECDSA", + "_encoding": { + "signature": "DISPLAY_HEX" + }, + "signature": "XXXXXXXX" + }, + "version": 0 }, - "certificate_policies": [ - { - "id": "1.2.3.4.4.1.11129.2.5.1" + { + "_encoding": { + "log_id": "DISPLAY_HEX" + }, + "log_id": "XXXXXX", + "timestamp": "2023-02-08T05:36:33Z", + "signature": { + "hash_algorithm": "SHA256", + "signature_algorithm": "ECDSA", + "_encoding": { + "signature": "DISPLAY_HEX" }, - { - "id": "2.3.4.1.2.2" - } - ], - "crl_distribution_points": [ - "http://pki.google.com/GIAG2.crl" - ], - "extended_key_usage": { - "client_auth": true, - "server_auth": true + "signature": "XXXXXXXX" + }, + "version": 0 + } + ], + "ct_poison": false + }, + "serial_number_hex": "8f6ef5fb7d4463d21249a8dea5284675", + "redacted": false + }, + "names": [ + "*.dns.google.com", + "8.8.4.4", + "8.8.8.8", + 
"8888.google", + "dns.google", + "dns.google.com", + "dns64.dns.google" + ], + "validation_level": "DV", + "validation": { + "nss": { + "ever_valid": true, + "had_trusted_path": true, + "chains": [ + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXXX", + "XXXXXXX" + ] }, - "key_usage": { - "digital_signature": true, - "value": 1 + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXXX", + "XXXXXXX" + ] + } + ], + "_encoding": { + "parents": "DISPLAY_HEX" + }, + "parents": [ + "XXXXXXX" + ], + "type": "LEAF", + "is_valid": false, + "has_trusted_path": false, + "in_revocation_set": false + }, + "microsoft": { + "ever_valid": true, + "had_trusted_path": true, + "chains": [ + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXXX", + "XXXXXXX" + ] }, - "subject_alt_name": { - "dns_names": [ - "*.google.com", - "*.android.com" - ], - "redacted": false, - "serial_number": "5878999135690490607", - "signature": { - "self_signed": false, - "signature_algorithm": { - "name": "SHA256-RSA", - "oid": "1.2.840.113549.1.1.11" - }, - "valid": false, - "value": "Value" - }, - "signature_algorithm": { - "name": "SHA256-RSA", - "oid": "1.2.840.113549.1.1.11" - }, - "spki_subject_fingerprint": "5eb06b1c29ced84998d3d35a80fa17d3d39e4de96d25539485aecd6360f12345", - "subject": { - "common_name": [ - "*.google.com" - ], - "country": [ - "US" - ], - "locality": [ - "Mountain View" - ], - "organization": [ - "Google Inc" - ], - "province": [ - "California" - ] - }, - "subject_dn": "C=US, ST=California, L=Mountain View, O=Google Inc, CN=*.google.com", - "subject_key_info": { - "ecdsa_public_key": {}, - "fingerprint_sha256": "3d4a4bd778be7965e90a13ac361e1ed7836d24c15cd5c093f9cc7e7857f51234", - "key_algorithm": { - "name": "ECDSA" - } - }, - "tbs_fingerprint": "1661b59eb7d8cda44f800fabc9ef69ba01506309eedf027f2270105afd11234", - "tbs_noct_fingerprint": "1661b59eb7d8cda44f800fabc9ef69ba01506309eedf027f2270105afd11234", - 
"validation_level": "OV", - "validity": { - "end": "2015-12-28T00:00:00Z", - "length": 7708840, - "start": "2015-09-29T18:39:20Z" - }, - "version": 3 + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXXX", + "XXXXXXX", + "XXXXXXX" + ] + } + ], + "_encoding": { + "parents": "DISPLAY_HEX" + }, + "parents": [ + "XXXXXXX" + ], + "type": "LEAF", + "is_valid": false, + "has_trusted_path": false, + "in_revocation_set": false + }, + "apple": { + "ever_valid": true, + "had_trusted_path": true, + "chains": [ + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXXX", + "XXXXXXX" + ] }, - "precert": false, - "raw": "veryLongString", - "tags": [ - "ct", - "expired", - "was-trusted", - "ov" - ], - "validation": { - "apple": { - "blacklisted": false, - "had_trusted_path": true, - "in_revocation_set": false, - "parents": [ - "c3f697a92a293d86f9a3ee7ccb970e20e0050b8728cc83ed1b996ce9005d4c36", - "9f630426df1d8abfd80ace98871ba833ab9742cb34838de2b5285ed54c0c7dcc" - ], - "paths": [ - [ - "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc", - "a4124fdaf9cac7baee1cab32e3225d746500c09f3cf3ebb253ef3fbb088afd34", - "ff856a2d251dcd88d36656f450126798cfabaade40799c722de4d2b5db36a73a" - ], - [ - "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc", - "44336eb05c6c783dc177217a9f6fef75f4524e98045b390803ae9de69eb42b08", - "ff856a2d251dcd88d36656f450126798cfabaade40799c722de4d2b5db36a73a" - ] - ], - "trusted_path": false, - "type": "leaf", - "valid": false, - "was_valid": true, - "whitelisted": false - }, - "google_ct_primary": { - "blacklisted": false, - "had_trusted_path": true, - "in_revocation_set": false, - "parents": [ - "c3f697a92a293d86f9a3ee7ccb970e20e0050b8728cc83ed1b996ce9005d4c36", - "9f630426df1d8abfd80ace98871ba833ab9742cb34838de2b5285ed54c0c7dcc" - ], - "paths": [ - [ - "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc", - "a4124fdaf9cac7baee1cab32e3225d746500c09f3cf3ebb253ef3fbb088afd34", 
- "ff856a2d251dcd88d36656f450126798cfabaade40799c722de4d2b5db36a73a" - ], - [ - "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc", - "44336eb05c6c783dc177217a9f6fef75f4524e98045b390803ae9de69eb42b08", - "ff856a2d251dcd88d36656f450126798cfabaade40799c722de4d2b5db36a73a" - ] - ], - "trusted_path": false, - "type": "leaf", - "valid": false, - "was_valid": true, - "whitelisted": false - }, - "microsoft": { - "blacklisted": false, - "had_trusted_path": true, - "in_revocation_set": false, - "parents": [ - "c3f697a92a293d86f9a3ee7ccb970e20e0050b8728cc83ed1b996ce9005d4c36", - "9f630426df1d8abfd80ace98871ba833ab9742cb34838de2b5285ed54c0c7dcc" - ], - "paths": [ - [ - "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc", - "a4124fdaf9cac7baee1cab32e3225d746500c09f3cf3ebb253ef3fbb088afd34", - "ff856a2d251dcd88d36656f450126798cfabaade40799c722de4d2b5db36a73a" - ], - [ - "9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc", - "44336eb05c6c783dc177217a9f6fef75f4524e98045b390803ae9de69eb42b08", - "ff856a2d251dcd88d36656f450126798cfabaade40799c722de4d2b5db36a73a" - ] - ], - "trusted_path": false, - "type": "leaf", - "valid": false, - "was_valid": true, - "whitelisted": false - }, - "nss": { - "blacklisted": false, - "had_trusted_path": false, - "in_revocation_set": false, - "paths": [], - "trusted_path": false, - "type": "unknown", - "valid": false, - "was_valid": false, - "whitelisted": false - }, - "revoked": false + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXXX", + "XXXXXXX", + "XXXXXXX" + ] + } + ], + "_encoding": { + "parents": "DISPLAY_HEX" + }, + "parents": [ + "XXXXXXX" + ], + "type": "LEAF", + "is_valid": false, + "has_trusted_path": false, + "in_revocation_set": false + }, + "chrome": { + "ever_valid": true, + "had_trusted_path": true, + "chains": [ + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "23ecb03eec17338c4e33a6b48a41dc3cda12281bbc3ff813c0589d6cc2387522", + 
"d947432abde7b7fa90fc2e6b59101b1280e0e1c7e4e40fa3c6887fff57a7f4cf" + ] }, - "zlint": { - "errors_present": false, - "fatals_present": false, - "lints": { - "n_subject_common_name_included": true, - "w_ext_key_usage_not_critical": true - }, - "notices_present": true, - "version": 3, - "warnings_present": true + { + "_encoding": { + "sha256fp": "DISPLAY_HEX" + }, + "sha256fp": [ + "XXXXXX", + "XXXXXXX", + "XXXXXXX" + ] } + ], + "_encoding": { + "parents": "DISPLAY_HEX" + }, + "parents": [ + "XXXXXXX" + ], + "type": "LEAF", + "is_valid": false, + "has_trusted_path": false, + "in_revocation_set": false } + }, + "ever_seen_in_scan": true, + "raw": "XXXXXXX", + "added_at": "2023-02-21T23:10:55Z", + "modified_at": "2024-01-23T14:45:42Z", + "validated_at": "2023-09-09T02:28:49Z", + "parse_status": "CERTIFICATE_PARSE_STATUS_SUCCESS", + "zlint": { + "version": 3, + "timestamp": "2023-09-09T02:28:49Z", + "notices_present": true, + "failed_lints": [ + "n_subject_common_name_included" + ], + "warnings_present": false, + "errors_present": false, + "fatals_present": false + }, + "spki_subject_fingerprint_sha256": "XXXXXX", + "parent_spki_subject_fingerprint_sha256": "XXXXXX", + "precert": false, + "revoked": false, + "labels": [ + "was-trusted", + "dv", + "ever-trusted", + "expired", + "leaf", + "untrusted" + ] } -} \ No newline at end of file + } \ No newline at end of file diff --git a/Packs/Censys/Integrations/CensysV2/test_data/view_host_response.json b/Packs/Censys/Integrations/CensysV2/test_data/view_host_response.json index 3405acf4d3b8..ca676c41c0ae 100644 --- a/Packs/Censys/Integrations/CensysV2/test_data/view_host_response.json +++ b/Packs/Censys/Integrations/CensysV2/test_data/view_host_response.json @@ -1,202 +1,69 @@ { "code": 200, + "status": "OK", "result": { - "autonomous_system": { - "asn": 15169, - "bgp_prefix": "8.8.8.0/24", - "country_code": "US", - "description": "GOOGLE", - "name": "GOOGLE" + "ip": "8.8.8.8", + "services": [ + + ], + "location": { + 
"continent": "North America", + "country": "United States", + "country_code": "US", + "city": "Ashburn", + "postal_code": "20147", + "timezone": "America/New_York", + "province": "Virginia", + "coordinates": { + "latitude": 39.04372, + "longitude": -77.48749 + } + }, + "location_updated_at": "2024-04-04T15:53:55.455881270Z", + "autonomous_system": { + "asn": 14618, + "description": "AMAZON-AES", + "bgp_prefix": "1.1.1.1", + "name": "AMAZON-AES", + "country_code": "US" + }, + "autonomous_system_updated_at": "2024-04-04T15:53:55.455881270Z", + "whois": { + "network": { + "handle": "AMAZON", + "name": "Amazon Technologies Inc." }, - "autonomous_system_updated_at": "2021-11-21T12:57:11.200575Z", - "dns": { - "names": [ - "wiki.leadershipnigeria.com.", - "uuu.mkppy.site.", - "hisports.club." - - ], - "records": { - "1508cleveland.duckdns.org": { - "record_type": "A", - "resolved_at": "2021-10-02T06:16:39.231714247Z" - }, - "albertogozzi.it": { - "record_type": "A", - "resolved_at": "2021-10-02T01:15:04.162523844Z" - }, - "alpha.lab.toshokan.fr": { - "record_type": "A", - "resolved_at": "2021-10-03T14:18:01.127044067Z" - } - }, - "reverse_dns": { - "names": [ - "dns.google" - ], - "resolved_at": "2021-11-19T14:46:47.044806032Z" + "organization": { + "handle": "AT-88-Z", + "name": "Amazon Technologies Inc.", + "street": "410 Terry Ave N.", + "city": "Seattle", + "state": "WA", + "postal_code": "98109", + "country": "US", + "abuse_contacts": [ + { + "handle": "AEA8-ARIN", + "name": "Amazon EC2 Abuse", + "email": "" } - }, - "ip": "8.8.8.8", - "last_updated_at": "2021-12-05T13:47:02.691Z", - "location": { - "continent": "North America", - "coordinates": { - "latitude": 37.751, - "longitude": -97.822 - }, - "country": "United States", - "country_code": "US", - "postal_code": "", - "registered_country": "United States", - "registered_country_code": "US", - "timezone": "America/LA" - }, - "location_updated_at": "2021-11-26T17:14:23.038540Z", - "services": [ + ], + 
"admin_contacts": [ { - "_decoded": "dns", - "dns": { - "answers": [ - { - "name": "ip.parrotdns.com.", - "response": "2.2.2.2", - "type": "A" - }, - { - "name": "ip.parrotdns.com.", - "response": "1.1.1.1", - "type": "A" - } - ], - "edns": { - "do": true, - "udp": 512, - "version": 0 - }, - "questions": [ - { - "name": "ip.parrotdns.com.", - "response": ";ip.parrotdns.com.\tIN\t A", - "type": "A" - } - ], - "r_code": "SUCCESS", - "resolves_correctly": true, - "server_type": "FORWARDING" - }, - "extended_service_name": "DNS", - "observed_at": "2021-12-05T13:47:02.604625718Z", - "perspective_id": "PERSPECTIVE_HE", - "port": 53, - "service_name": "DNS", - "source_ip": "1.2.3.48", - "transport_protocol": "UDP", - "truncated": false - }, + "handle": "IPMAN40-ARIN", + "name": "IP Management", + "email": "" + } + ], + "tech_contacts": [ { - "_decoded": "http", - "_encoding": { - "banner": "DISPLAY_UTF8", - "banner_hex": "DISPLAY_HEX", - "certificate": "DISPLAY_HEX" - }, - "certificate": "bb9648a9935fe0d07ba4e1c341286382d54a75e79ac1564988bd78e20cb81234", - "extended_service_name": "HTTPS", - "observed_at": "2021-12-05T09:16:41.181804929Z", - "perspective_id": "PERSPECTIVE_NTT", - "port": 443, - "service_name": "HTTP", - "source_ip": "1.2.3.4", - "tls": { - "certificates": { - "_encoding": { - "chain_fps_sha_256": "DISPLAY_HEX", - "leaf_fp_sha_256": "DISPLAY_HEX" - }, - "chain": [ - { - "fingerprint": "23ecb03eec17338c4e33a6b48a41dc3cda12281bbc3ff813c0589d6cc2381234", - "issuer_dn": "C=US, O=Google Trust Services LLC, CN=GTS Root R1", - "subject_dn": "C=US, O=Google Trust Services LLC, CN=GTS CA 1C3" - }, - { - "fingerprint": "3ee0278df71fa3c125c4cd487f01d774694e6fc57e0cd94c24efd76913391234", - "issuer_dn": "C=BE, O=GlobalSign nv-sa, OU=Root CA, CN=GlobalSign Root CA", - "subject_dn": "C=US, O=Google Trust Services LLC, CN=GTS Root R1" - } - ], - "chain_fps_sha_256": [ - "23ecb03eec17338c4e33a6b48a41dc3cda12281bbc3ff813c0589d6cc2381234", - 
"3ee0278df71fa3c125c4cd487f01d774694e6fc57e0cd94c24efd76913391123" - ], - "leaf_data": { - "fingerprint": "bb9648a9935fe0d07ba4e1c341286382d54a75e79ac1564988bd78e20cb81234", - "issuer": { - "common_name": [ - "GTS CA 1C3" - ], - "country": [ - "US" - ], - "organization": [ - "Google Trust Services LLC" - ] - }, - "issuer_dn": "C=US, O=Google Trust Services LLC, CN=GTS CA 1C3", - "names": [ - "*.dns.google.com", - "8.8.4.4", - "8.8.8.8", - "8888.google", - "dns.google", - "dns.google.com", - "dns64.dns.google" - ], - "pubkey_algorithm": "RSA", - "pubkey_bit_size": 2048, - "public_key": { - "fingerprint": "eb975485cb4281ae832fb5ebd210c58be57c57fddab0631b30eec783730a1234", - "key_algorithm": "RSA", - "rsa": { - "_encoding": { - "exponent": "DISPLAY_BASE64", - "modulus": "DISPLAY_BASE64" - }, - "exponent": "AAEAAQ==", - "length": 256 - } - }, - "signature": { - "self_signed": false, - "signature_algorithm": "SHA256-RSA" - }, - "subject": { - "common_name": [ - "dns.google" - ] - }, - "subject_dn": "CN=dns.google", - "tbs_fingerprint": "7f2e4098c54f11e6d1f1ea679716525852f819b12fdd443f4074f862ff751234" - }, - "leaf_fp_sha_256": "bb9648a9935fe0d07ba4e1c341286382d54a75e79ac1564988bd78e20cb81234" - }, - "cipher_selected": "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", - "server_key_exchange": { - "ec_params": { - "named_curve": 23 - } - }, - "session_ticket": { - "length": 221, - "lifetime_hint": 100800 - }, - "version_selected": "TLSv1_2" - }, - "transport_protocol": "TCP", - "truncated": false + "handle": "ANO24-ARIN", + "name": "Amazon EC2 Network Operations", + "email": "" } - ] - }, - "status": "OK" -} + ] + } + }, + "last_updated_at": "2022-10-02T14:42:17.327Z" + } + } \ No newline at end of file diff --git a/Packs/Censys/ReleaseNotes/2_0_29.md b/Packs/Censys/ReleaseNotes/2_0_29.md new file mode 100644 index 000000000000..e1a8fc6e628e --- /dev/null +++ b/Packs/Censys/ReleaseNotes/2_0_29.md @@ -0,0 +1,17 @@ +#### Integrations + +##### Censys v2 + +- Added the following new 
parameters: + - *Server URL* - The server URL. + - *Determine IP score by labels (for paid subscribers)* - Instructs the integration to use labels to determine the IP score. + - *IP Malicious labels* - Labels to classify IP as Malicious. + - *IP Suspicious labels* - Labels to classify IP as Suspicious. + - *Malicious labels threshold* - Determines the minimum number of labels returned that are classified as malicious for IP. + - *Suspicious labels threshold* - Determines the minimum number of labels returned that are classified as suspicious for IP. + - *Source Reliability* - Reliability of the source providing the intelligence data. +- Updated the API endpoint to Censys v2. +- Added the following new commands: + - ***ip*** to allowing users to check the reputation of IP addresses. + - ***domain*** which retrieves all IP addresses related to a given domain. +- Updated the Docker image to: *demisto/python3:3.10.14.92207*. diff --git a/Packs/Censys/TestPlaybooks/CensysV2-Test.yml b/Packs/Censys/TestPlaybooks/CensysV2-Test.yml index 17116ffd089f..fc8c918538c5 100644 --- a/Packs/Censys/TestPlaybooks/CensysV2-Test.yml +++ b/Packs/Censys/TestPlaybooks/CensysV2-Test.yml @@ -5,10 +5,10 @@ starttaskid: "0" tasks: "0": id: "0" - taskid: e0b636e0-7589-4795-84a0-239b759f0a76 + taskid: 2f6915d9-8bf7-4e82-871e-117bbd305ea1 type: start task: - id: e0b636e0-7589-4795-84a0-239b759f0a76 + id: 2f6915d9-8bf7-4e82-871e-117bbd305ea1 version: -1 name: "" iscommand: false @@ -32,12 +32,13 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "1": id: "1" - taskid: c3c4c7e0-95d8-49c7-8370-b7fd8af76e7f + taskid: db89ebd4-67f4-45a7-8941-62b71fed31a1 type: regular task: - id: c3c4c7e0-95d8-49c7-8370-b7fd8af76e7f + id: db89ebd4-67f4-45a7-8941-62b71fed31a1 version: -1 name: Delete Context description: Delete field from context @@ -66,16 +67,16 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "2": 
id: "2" - taskid: 9551c808-8e0e-47b4-8865-788625ca916c + taskid: f5e8ce3d-463c-47d2-8a5a-46c30df914d0 type: regular task: - id: 9551c808-8e0e-47b4-8865-788625ca916c + id: f5e8ce3d-463c-47d2-8a5a-46c30df914d0 version: -1 name: Censys View Host - description: Returns detailed information for an IP address or SHA256 within - the specified index. + description: Returns detailed information for an IP address or SHA256 within the specified index. script: '|||cen-view' type: regular iscommand: true @@ -103,12 +104,13 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "3": id: "3" - taskid: 2e3b2466-aec0-4c2a-8665-54499c5fedd7 + taskid: dc449e1f-418b-4890-8e83-7c727d277a07 type: condition task: - id: 2e3b2466-aec0-4c2a-8665-54499c5fedd7 + id: dc449e1f-418b-4890-8e83-7c727d277a07 version: -1 name: Check Outputs type: condition @@ -145,16 +147,16 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "4": id: "4" - taskid: 08cffc3a-c662-48e8-8030-af21aa29fb1d + taskid: 2de82fc2-b4cf-442d-8636-caf82109eada type: regular task: - id: 08cffc3a-c662-48e8-8030-af21aa29fb1d + id: 2de82fc2-b4cf-442d-8636-caf82109eada version: -1 name: Censys Certs View - description: Returns detailed information for an IP address or SHA256 within - the specified index. + description: Returns detailed information for an IP address or SHA256 within the specified index. 
script: '|||cen-view' type: regular iscommand: true @@ -182,12 +184,13 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "5": id: "5" - taskid: 8d2b1a3d-cf1a-4a1b-888b-4990c687f2c1 + taskid: 0cc15261-56c4-4e23-85d3-7d512e8adbb2 type: condition task: - id: 8d2b1a3d-cf1a-4a1b-888b-4990c687f2c1 + id: 0cc15261-56c4-4e23-85d3-7d512e8adbb2 version: -1 name: Check Outputs type: condition @@ -200,19 +203,19 @@ tasks: conditions: - label: "yes" condition: - - - operator: isNotEmpty - left: - value: - simple: Censys.View.parsed - iscontext: true - - operator: isEqualString left: value: - simple: Censys.View.fingerprint_sha256 + simple: Censys.View.[1].fingerprint_sha256 iscontext: true right: value: simple: 9d3b51a6b80daf76e074730f19dc01e643ca0c3127d8f48be64cf3302f6622cc + - - operator: isNotEmpty + left: + value: + simple: Censys.View.[1].parsed + iscontext: true view: |- { "position": { @@ -227,16 +230,16 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "6": id: "6" - taskid: 4b941d4f-a3a0-47ff-8eb7-2cdf1d04e7b3 + taskid: 2a75d0b3-ea92-4489-81e7-ea57931e46e5 type: regular task: - id: 4b941d4f-a3a0-47ff-8eb7-2cdf1d04e7b3 + id: 2a75d0b3-ea92-4489-81e7-ea57931e46e5 version: -1 name: Censys search host - description: Returns previews of hosts matching a specified search query or - a list of certificates that match the given query. + description: Returns previews of hosts matching a specified search query or a list of certificates that match the given query. 
script: '|||cen-search' type: regular iscommand: true @@ -266,12 +269,13 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "7": id: "7" - taskid: a8935a5c-f3fe-4c81-8f4d-ea0f4ee4b051 + taskid: a6ea9caf-a05f-41b3-8910-2201f539cb71 type: condition task: - id: a8935a5c-f3fe-4c81-8f4d-ea0f4ee4b051 + id: a6ea9caf-a05f-41b3-8910-2201f539cb71 version: -1 name: Check Outputs type: condition @@ -308,16 +312,16 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "8": id: "8" - taskid: 5c97bd74-b11c-4e87-8ecc-46372801c63b + taskid: 4e9447b6-9b21-4db3-865c-73563cfde116 type: regular task: - id: 5c97bd74-b11c-4e87-8ecc-46372801c63b + id: 4e9447b6-9b21-4db3-865c-73563cfde116 version: -1 name: Censys Certs Search - description: Returns previews of hosts matching a specified search query or - a list of certificates that match the given query. + description: Returns previews of hosts matching a specified search query or a list of certificates that match the given query. 
script: '|||cen-search' type: regular iscommand: true @@ -347,12 +351,13 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "9": id: "9" - taskid: 0c33078e-b17e-45d8-884c-0a815af48f11 + taskid: 2d5a862a-ef3e-477d-86c1-5b52b6175cd8 type: condition task: - id: 0c33078e-b17e-45d8-884c-0a815af48f11 + id: 2d5a862a-ef3e-477d-86c1-5b52b6175cd8 version: -1 name: Check Outputs type: condition @@ -360,7 +365,7 @@ tasks: brand: "" nexttasks: "yes": - - "10" + - "11" separatecontext: false conditions: - label: "yes" @@ -384,20 +389,56 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + continueonerrortype: "" "10": id: "10" - taskid: 6425f7d8-9f66-4266-889e-a19e1634c48c - type: regular + taskid: 0d836439-1f27-4c82-8180-1be3ec39d2b8 + type: title task: - id: 6425f7d8-9f66-4266-889e-a19e1634c48c + id: 0d836439-1f27-4c82-8180-1be3ec39d2b8 version: -1 - name: Close Investigation + name: Done description: commands.local.cmd.close.inv - script: Builtin|||closeInvestigation + type: title + iscommand: false + brand: Builtin + separatecontext: false + view: |- + { + "position": { + "x": 450, + "y": 2560 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + continueonerrortype: "" + "11": + id: "11" + taskid: 97b54730-84dc-40ad-8759-3a46427997bc + type: regular + task: + id: 97b54730-84dc-40ad-8759-3a46427997bc + version: -1 + name: IP + description: Runs reputation on IPs + script: '|||ip' type: regular iscommand: true - brand: Builtin + brand: "" + nexttasks: + '#none#': + - "13" + scriptarguments: + ip: + simple: 8.8.8.8 separatecontext: false + continueonerrortype: "" view: |- { "position": { @@ -412,12 +453,137 @@ tasks: quietmode: 0 isoversize: false isautoswitchedtoquietmode: false + "13": + id: "13" + taskid: 9b6cebfb-20a8-40d2-8ea1-f1c5dabed367 + type: condition + task: + id: 
9b6cebfb-20a8-40d2-8ea1-f1c5dabed367 + version: -1 + name: Check Outputs + type: condition + iscommand: false + brand: "" + nexttasks: + "yes": + - "14" + separatecontext: false + conditions: + - label: "yes" + condition: + - - operator: isNotEmpty + left: + value: + simple: Censys.IP.autonomous_system + iscontext: true + right: + value: {} + - - operator: isNotEmpty + left: + value: + simple: Censys.IP.location + iscontext: true + continueonerrortype: "" + view: |- + { + "position": { + "x": 450, + "y": 2040 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "14": + id: "14" + taskid: dbd5bdf8-d4e4-4d78-8ec9-cb3a4cbb10f2 + type: regular + task: + id: dbd5bdf8-d4e4-4d78-8ec9-cb3a4cbb10f2 + version: -1 + name: Domain + description: Checks the reputation of a domain. + script: '|||domain' + type: regular + iscommand: true + brand: "" + nexttasks: + '#none#': + - "15" + scriptarguments: + domain: + simple: amazon.com + separatecontext: false + continueonerrortype: "" + view: |- + { + "position": { + "x": 450, + "y": 2210 + } + } + note: false + timertriggers: [] + ignoreworker: false + skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false + "15": + id: "15" + taskid: e7192ce9-d537-40e2-81be-45aec0af6b4b + type: condition + task: + id: e7192ce9-d537-40e2-81be-45aec0af6b4b + version: -1 + name: Check Outputs + type: condition + iscommand: false + brand: "" + nexttasks: + "yes": + - "10" + separatecontext: false + conditions: + - label: "yes" + condition: + - - operator: isNotEmpty + left: + value: + simple: Censys.Domain.autonomous_system + iscontext: true + right: + value: {} + - - operator: isNotEmpty + left: + value: + simple: Censys.Domain.location + iscontext: true + continueonerrortype: "" + view: |- + { + "position": { + "x": 450, + "y": 2390 + } + } + note: false + timertriggers: [] + ignoreworker: false + 
skipunavailable: false + quietmode: 0 + isoversize: false + isautoswitchedtoquietmode: false view: |- { "linkLabelsPosition": {}, "paper": { "dimensions": { - "height": 1915, + "height": 2575, "width": 380, "x": 450, "y": 50 diff --git a/Packs/Censys/pack_metadata.json b/Packs/Censys/pack_metadata.json index 5396df970432..2b44b209fb72 100644 --- a/Packs/Censys/pack_metadata.json +++ b/Packs/Censys/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Censys", "description": "Censys is a search engine that allows computer scientists to ask questions about the devices and networks that compose the Internet. Driven by Internet-wide scanning, Censys lets researchers find specific hosts and create aggregate reports on how devices, websites, and certificates are configured and deployed.", "support": "xsoar", - "currentVersion": "2.0.28", + "currentVersion": "2.0.29", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex", "email": "", diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py index d632d5b18e59..493327ba477a 100644 --- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py +++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.py @@ -9,6 +9,7 @@ urllib3.disable_warnings() DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' +FETCH_INTERVAL_DEFAULT = 1 MAX_FETCH_DEFAULT = 10 SAAS_NAMES = [ 'office365_emails', @@ -415,11 +416,14 @@ def fetch_incidents(client: Client, first_fetch: str, saas_apps: List[str], stat def checkpointhec_get_entity(client: Client, entity: str) -> CommandResults: result = client.get_entity(entity) - if entities := result['responseData']: + if entities := result.get('responseData'): + entity = entities[0]['entityPayload'] + human_readable = tableToMarkdown('entity', entity, removeNull=True) return CommandResults( outputs_prefix='CheckPointHEC.Entity', outputs_key_field='internetMessageId', - outputs=entities[0]['entityPayload'] + 
readable_output=human_readable, + outputs=entity, ) else: return CommandResults( @@ -434,11 +438,14 @@ def checkpointhec_get_events(client: Client, start_date: str, end_date: str = No start_date=start_date, end_date=end_date, saas_apps=saas_apps, states=states, severities=severities, threat_types=threat_types ) - if events := result['responseData']: + if events := result.get('responseData'): + _events = events[:min(limit, len(events))] + human_readable = tableToMarkdown('events', _events, removeNull=True) return CommandResults( outputs_prefix='CheckPointHEC.Event', outputs_key_field='eventId', - outputs=events[:min(limit, len(events))] + readable_output=human_readable, + outputs=_events, ) else: return CommandResults( @@ -449,7 +456,7 @@ def checkpointhec_get_events(client: Client, start_date: str, end_date: str = No def checkpointhec_get_scan_info(client: Client, entity: str) -> CommandResults: result = client.get_entity(entity) outputs = {} - if entities := result['responseData']: + if entities := result.get('responseData'): sec_result = entities[0]['entitySecurityResult'] for tool, verdict in sec_result['combinedVerdict'].items(): if verdict not in (None, 'clean'): @@ -539,7 +546,7 @@ def checkpointhec_search_emails(client: Client, date_last: str = None, date_from def checkpointhec_send_action(client: Client, entities: list, entity_type: str, action: str) -> CommandResults: result = client.send_action(entities, entity_type, action) - if resp := result['responseData']: + if resp := result.get('responseData'): return CommandResults( outputs_prefix='CheckPointHEC.Task', outputs={'task': resp[0]['taskId']} @@ -552,7 +559,7 @@ def checkpointhec_send_action(client: Client, entities: list, entity_type: str, def checkpointhec_get_action_result(client: Client, task: str) -> CommandResults: result = client.get_task(task) - if resp := result['responseData']: + if resp := result.get('responseData'): return CommandResults( outputs_prefix='CheckPointHEC.ActionResult', 
outputs=resp @@ -605,7 +612,7 @@ def main() -> None: # pragma: no cover 'severities': [SEVERITY_VALUES.get(x.lower()) for x in argToList(params.get('event_severity'))], 'threat_types': [x.lower().replace(' ', '_') for x in argToList(params.get('threat_type'))], 'max_fetch': int(params.get('max_fetch', MAX_FETCH_DEFAULT)), - 'fetch_interval': params.get('incidentFetchInterval'), + 'fetch_interval': int(params.get('incidentFetchInterval', FETCH_INTERVAL_DEFAULT)), } fetch_incidents(client, **kwargs) elif command == 'checkpointhec-get-entity': diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml index 8ad018354c8d..36dcc14ba689 100644 --- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml +++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/CheckPointHEC.yml @@ -638,7 +638,7 @@ script: script: '-' type: python subtype: python3 - dockerimage: demisto/python3:3.10.14.91134 + dockerimage: demisto/python3:3.10.14.95137 fromversion: 6.9.0 tests: - No tests (auto formatted) diff --git a/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md b/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md index 5f3804da383d..476ad138e534 100644 --- a/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md +++ b/Packs/CheckPointHEC/Integrations/CheckPointHEC/README.md @@ -1,5 +1,5 @@ The Best Way to Protect Enterprise Email & Collaboration from phishing, malware, account takeover, data loss, etc. -This integration was integrated and tested with version 1.1.0 of CheckPointHEC +This integration was integrated and tested with version 1.1.2 of CheckPointHEC ## Configure Check Point Harmony Email and Collaboration (HEC) on Cortex XSOAR @@ -27,6 +27,7 @@ This integration was integrated and tested with version 1.1.0 of CheckPointHEC 4. Click **Test** to validate the URLs, token, and connection. 
+ ## Commands You can execute these commands from the Cortex XSOAR CLI, as part of an automation, or in a playbook. @@ -349,4 +350,3 @@ Retrieve security events. | CheckPointHEC.Event.actions | unknown | Performed actions related to the security event. | | CheckPointHEC.Event.senderAddress | String | Sender of email related to the security event. | | CheckPointHEC.Event.entityLink | String | Email link. | - diff --git a/Packs/CheckPointHEC/ReleaseNotes/1_1_1.md b/Packs/CheckPointHEC/ReleaseNotes/1_1_1.md new file mode 100644 index 000000000000..1792eee763a4 --- /dev/null +++ b/Packs/CheckPointHEC/ReleaseNotes/1_1_1.md @@ -0,0 +1,6 @@ +#### Integrations + +##### Check Point Harmony Email and Collaboration (HEC) + +- Fixed an issue on fetching incidents. + diff --git a/Packs/CheckPointHEC/ReleaseNotes/1_1_2.md b/Packs/CheckPointHEC/ReleaseNotes/1_1_2.md new file mode 100644 index 000000000000..61cb616f8020 --- /dev/null +++ b/Packs/CheckPointHEC/ReleaseNotes/1_1_2.md @@ -0,0 +1,27 @@ + +#### Scripts + +##### SendCPAction + +- Fixed an issue where the script would not work properly when there are multiple instances running. +- Updated the Docker image to: *demisto/python3:3.10.14.95137*. + +##### ShowCPEmailInfo + +- Fixed an issue where the script would not work properly when there are multiple instances running. +- Updated the Docker image to: *demisto/python3:3.10.14.95137*. + +##### ShowCPScanInfo + +- Fixed an issue where the script would not work properly when there are multiple instances running. +- Updated the Docker image to: *demisto/python3:3.10.14.95137*. + + +#### Integrations + +##### Check Point Harmony Email and Collaboration (HEC) + +- Updated the human-readable section in the following commands: + - **checkpointhec_get_entity** + - **checkpointhec_get_events** +- Updated the Docker image to: *demisto/python3:3.10.14.95137*. 
diff --git a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py index 56cbab980d44..ef243833b075 100644 --- a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py +++ b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.py @@ -1,12 +1,13 @@ from CommonServerPython import * -def send_action_and_update_incident(entity: str, action: str): +def send_action_and_update_incident(entity: str, action: str, incident: str): result = demisto.executeCommand( "checkpointhec-send-action", { 'entity': entity, 'action': action, + 'using': incident } ) demisto.executeCommand( @@ -22,10 +23,11 @@ def send_action_and_update_incident(entity: str, action: str): def main(): # pragma: no cover try: + incident = demisto.incident()['sourceInstance'] args = demisto.args() entity = args.get('entity') action = args.get('action') - return_results(send_action_and_update_incident(entity, action)) + return_results(send_action_and_update_incident(entity, action, incident)) except Exception as ex: demisto.error(traceback.format_exc()) return_error(f'Failed to execute BaseScript. 
Error: {str(ex)}') diff --git a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml index 97ae8386a505..7c5c36d3de39 100644 --- a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml +++ b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction.yml @@ -25,7 +25,7 @@ dependson: must: - CheckPointHEC|||checkpointhec-send-action runonce: false -dockerimage: demisto/python3:3.10.14.91134 +dockerimage: demisto/python3:3.10.14.95137 runas: DBotWeakRole fromversion: 6.9.0 tests: diff --git a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py index 89bfedfda279..a75531af48d6 100644 --- a/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py +++ b/Packs/CheckPointHEC/Scripts/SendCPAction/SendCPAction_test.py @@ -14,5 +14,5 @@ def execute_command(name, args): mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command) - result = send_action_and_update_incident('0000', 'quarantine') + result = send_action_and_update_incident('0000', 'quarantine', 'CheckPointHEC-instance-1') assert result == [{'Contents': {'task': 1}}] diff --git a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py index 6129adfaad39..2e736b1ffc9c 100644 --- a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py +++ b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.py @@ -30,21 +30,29 @@ def dict_to_md(info: dict) -> str: return '\n'.join(lines) -def get_email_info(entity: str): +def get_email_info(entity: str, instance: str) -> tuple[bool, str]: email_info = demisto.executeCommand( "checkpointhec-get-entity", - {'entity': entity} + {'entity': entity, 'using': instance} )[0]['Contents'] - return dict_to_md(email_info) + if isinstance(email_info, str): + return False, email_info + + return True, dict_to_md(email_info) def main(): # pragma: no cover 
try: - custom_fields = demisto.incident()['CustomFields'] + incident = demisto.incident() + instance = incident['sourceInstance'] + custom_fields = incident['CustomFields'] if not (email_info := custom_fields.get(EMAIL_INFO_FIELD)): entity = custom_fields.get('checkpointhecentity') - email_info = get_email_info(entity) + success, email_info = get_email_info(entity, instance) + if not success: + raise Exception(email_info) + demisto.executeCommand( "setIncident", { diff --git a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml index 9100b7682ab3..d0ad6bdd5520 100644 --- a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml +++ b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo.yml @@ -13,7 +13,7 @@ dependson: must: - CheckPointHEC|||checkpointhec-get-entity runonce: false -dockerimage: demisto/python3:3.10.14.91134 +dockerimage: demisto/python3:3.10.14.95137 runas: DBotWeakRole fromversion: 6.9.0 tests: diff --git a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py index f9307351b51f..19b5b6c97787 100644 --- a/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py +++ b/Packs/CheckPointHEC/Scripts/ShowCPEmailInfo/ShowCPEmailInfo_test.py @@ -9,7 +9,23 @@ def util_load_json(path): return json.loads(f.read()) -def test_get_email_info(mocker): +def test_get_email_info_error(mocker): + error = 'Error: Entity not found' + + def execute_command(name, args): + if name == 'checkpointhec-get-entity': + return [{'Contents': error}] + + raise ValueError(f'Error: Unknown command or command/argument pair: {name} {args!r}') + + mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command) + + success, email_info = get_email_info('0000', 'CheckPointHEC-instance-1') + assert success is False + assert email_info == error + + +def test_get_email_info_success(mocker): 
mock_response = util_load_json('./test_data/checkpointhec-get_entity.json') def execute_command(name, args): @@ -23,7 +39,8 @@ def execute_command(name, args): mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command) - email_info = get_email_info('0000') + success, email_info = get_email_info('0000', 'CheckPointHEC-instance-1') + assert success is True assert email_info == dict_to_md(mock_response['responseData'][0]['entityPayload']) diff --git a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py index 43c982e8542b..f2ff757b32ac 100644 --- a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py +++ b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.py @@ -3,24 +3,32 @@ SCAN_INFO_FIELD = 'checkpointhecscaninfo' -def get_scan_info(entity: str) -> str: +def get_scan_info(entity: str, instance: str) -> tuple[bool, str]: scan_info = demisto.executeCommand( "checkpointhec-get-scan-info", - {'entity': entity} + {'entity': entity, 'using': instance} )[0]['Contents'] + if isinstance(scan_info, str): + return False, scan_info + for k, v in scan_info.items(): scan_info[k] = json.loads(v) - return json.dumps(scan_info) + return True, json.dumps(scan_info) def main(): # pragma: no cover try: - custom_fields = demisto.incident()['CustomFields'] + incident = demisto.incident() + instance = incident['sourceInstance'] + custom_fields = incident['CustomFields'] if not (scan_info := custom_fields.get(SCAN_INFO_FIELD)): entity = custom_fields.get('checkpointhecentity') - scan_info = get_scan_info(entity) + success, scan_info = get_scan_info(entity, instance) + if not success: + raise Exception(scan_info) + demisto.executeCommand( "setIncident", { diff --git a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml index e0784aa78a96..e26220b550eb 100644 --- 
a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml +++ b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo.yml @@ -13,7 +13,7 @@ dependson: must: - CheckPointHEC|||checkpointhec-get-scan-info runonce: false -dockerimage: demisto/python3:3.10.14.91134 +dockerimage: demisto/python3:3.10.14.95137 runas: DBotWeakRole fromversion: 6.9.0 tests: diff --git a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py index 5de10417f43d..9dc68579ffdc 100644 --- a/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py +++ b/Packs/CheckPointHEC/Scripts/ShowCPScanInfo/ShowCPScanInfo_test.py @@ -9,7 +9,23 @@ def util_load_json(path): return json.loads(f.read()) -def test_get_scan_info(mocker): +def test_get_scan_info_error(mocker): + error = 'Error: Entity not found' + + def execute_command(name, args): + if name == 'checkpointhec-get-scan-info': + return [{'Contents': error}] + + raise ValueError(f'Error: Unknown command or command/argument pair: {name} {args!r}') + + mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command) + + success, scan_info = get_scan_info('0000', 'CheckPointHEC-instance-1') + assert success is False + assert scan_info == error + + +def test_get_scan_info_success(mocker): mock_response = util_load_json('./test_data/checkpointhec-get_entity.json') def execute_command(name, args): @@ -20,5 +36,6 @@ def execute_command(name, args): mocker.patch.object(demisto, 'executeCommand', side_effect=execute_command) - result = get_scan_info('0000') - assert result == json.dumps({'av': mock_response['responseData'][0]['entitySecurityResult']['av']}) + success, scan_info = get_scan_info('0000', 'CheckPointHEC-instance-1') + assert success is True + assert scan_info == json.dumps({'av': mock_response['responseData'][0]['entitySecurityResult']['av']}) diff --git a/Packs/CheckPointHEC/pack_metadata.json b/Packs/CheckPointHEC/pack_metadata.json 
index c0e99a194a34..28887ac66bb1 100644 --- a/Packs/CheckPointHEC/pack_metadata.json +++ b/Packs/CheckPointHEC/pack_metadata.json @@ -2,7 +2,7 @@ "name": "Check Point Harmony Email and Collaboration (HEC)", "description": "The Best Way to Protect Enterprise Email & Collaboration from phishing, malware, account takeover, data loss, etc.", "support": "partner", - "currentVersion": "1.1.0", + "currentVersion": "1.1.2", "author": "Check Point Harmony Email & Collaboration (HEC)", "url": "https://supportcenter.checkpoint.com/", "email": "EmailSecurity_Support@checkpoint.com", diff --git a/Tests/scripts/infrastructure_tests/tests_data/collect_tests/PR1/Packs/MyXSIAMPack/.pack-ignore b/Packs/CheckPointNDR/.pack-ignore similarity index 100% rename from Tests/scripts/infrastructure_tests/tests_data/collect_tests/PR1/Packs/MyXSIAMPack/.pack-ignore rename to Packs/CheckPointNDR/.pack-ignore diff --git a/Tests/scripts/infrastructure_tests/tests_data/collect_tests/A_xsiam/Packs/CoreAlertFields/.secrets-ignore b/Packs/CheckPointNDR/.secrets-ignore similarity index 100% rename from Tests/scripts/infrastructure_tests/tests_data/collect_tests/A_xsiam/Packs/CoreAlertFields/.secrets-ignore rename to Packs/CheckPointNDR/.secrets-ignore diff --git a/Packs/CheckPointNDR/Author_image.png b/Packs/CheckPointNDR/Author_image.png new file mode 100644 index 000000000000..aad1cee22c74 Binary files /dev/null and b/Packs/CheckPointNDR/Author_image.png differ diff --git a/Packs/CheckPointNDR/CONTRIBUTORS.json b/Packs/CheckPointNDR/CONTRIBUTORS.json new file mode 100644 index 000000000000..386c88b8a077 --- /dev/null +++ b/Packs/CheckPointNDR/CONTRIBUTORS.json @@ -0,0 +1,3 @@ +[ + "Max Nosko" +] \ No newline at end of file diff --git a/Packs/CheckPointNDR/IncidentTypes/incidenttype-CheckPoint_NDR_Insight.json b/Packs/CheckPointNDR/IncidentTypes/incidenttype-CheckPoint_NDR_Insight.json new file mode 100644 index 000000000000..8647aafa2d93 --- /dev/null +++ 
b/Packs/CheckPointNDR/IncidentTypes/incidenttype-CheckPoint_NDR_Insight.json @@ -0,0 +1,29 @@ +{ + "id": "Check Point NDR Insight", + "version": -1, + "vcShouldIgnore": false, + "locked": false, + "name": "Check Point NDR Insight", + "prevName": "Check Point NDR Insight", + "color": "#eb5c92", + "hours": 0, + "days": 0, + "weeks": 0, + "hoursR": 0, + "daysR": 0, + "weeksR": 0, + "system": false, + "readonly": false, + "default": false, + "autorun": false, + "disabled": false, + "reputationCalc": 0, + "onChangeRepAlg": 0, + "layout": "Check Point NDR Insight Layout", + "detached": false, + "extractSettings": { + "mode": "Specific", + "fieldCliNameToExtractSettings": {} + }, + "fromVersion": "6.9.0" +} \ No newline at end of file diff --git a/Packs/CheckPointNDR/Integrations/CheckPointNDR/CheckPointNDR.py b/Packs/CheckPointNDR/Integrations/CheckPointNDR/CheckPointNDR.py new file mode 100644 index 000000000000..3593d391c42b --- /dev/null +++ b/Packs/CheckPointNDR/Integrations/CheckPointNDR/CheckPointNDR.py @@ -0,0 +1,176 @@ +import urllib3 +import demistomock as demisto +from CommonServerPython import * + +# Disable insecure warnings +urllib3.disable_warnings() + +DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ' +NDR_URL = 'https://now.checkpoint.com' + + +class Client(BaseClient): + def __init__(self, base_url: str, client_id: str, access_key: str, domain: str, verify: bool, proxy: bool): + + super().__init__(base_url=base_url, verify=verify, proxy=proxy) + + self.client_id = client_id + self.access_key = access_key + self.domain = domain + + def _login(self) -> None: + auth_data = {"selectedRealm": "ssl_vpn_Username_Password", + "userName": self.client_id, + "password": self.access_key} + + if self._session.cookies: + self._session.cookies.clear() + + res = self._http_request('POST', url_suffix='/Login/LoginAPI', data=auth_data, resp_type='response') + + if res.status_code == 200: + demisto.debug(f"Log-in successful, new API endpoint is: {res.url}") + self._base_url = res.url + 
else: + raise DemistoException(f"Log-in failed: {str(res.status_code)}: {res.text}") + + def _logout(self) -> None: + if self._session.cookies: + self._http_request('POST', url_suffix='/../Portal/SignOut', resp_type='response') + + def _call_api(self, + url_suffix: str, + method: str = "GET", + params: Optional[dict[str, Any]] = None, + json_data: Optional[dict[str, Any]] = None) -> list[dict[str, Any]]: + + res_json = self._http_request( + method, + url_suffix=f'/incidents/v1/{url_suffix}', + headers={'domain': self.domain}, + params=params, + json_data=json_data + ) + + if res_json.get('status') and res_json.get('status') == "error": + raise DemistoException(f"API call failed: {res_json.get('message')}") + return res_json.get('objects') + + def get_insights(self, startTS: int, max_fetch: int): + self._login() + insights = self._call_api('insights', params={'updated': f"gt.{startTS}"}) + demisto.debug(f"Fetched {len(insights)} NDR Insights, processing {min(len(insights), max_fetch)} of them...") + + insights = sorted(insights, key=lambda x: x['updated'])[:max_fetch] + for insight in insights: + ids = ','.join(map(str, insight['events'])) + insight['events'] = self._call_api('events', params={'id': ids}) + demisto.debug(f"Fetched {len(insight['events'])} events of Insight {insight['id']}") + + self._logout() + + return insights + + +def test_module(client: Client, last_run: dict[str, str], first_fetch: datetime, domain: str): + try: + fetch_incidents(client, last_run, first_fetch, domain, 1) + return 'ok' + except DemistoException as e: + return e.message + + +def parse_insights(insights: list[dict[str, Any]], domain: str, startTS: int, max_fetch: int): + incidents: list[dict[str, Any]] = [] + for insight in insights: + for event in insight['events']: + if event['updated'] <= startTS: + continue + + id = f"{insight['id']}_{event['id']}" + name = insight['data'].get('name', insight['criteria']) + updated = int(event['data'].get('discovery_date', event['updated'])) + 
desc_i = insight['data'].get('description', '') + desc_e = event['data'].get('description', '') + description = desc_i + "\n" + desc_e if desc_e else desc_i + link = f"{NDR_URL}/#/insights?id={insight['id']}&domain={domain}&startDate={event['from']}&endDate={event['to']}" + severity = 3 + if event['probability'] < 60: + severity = 1 + elif event['probability'] < 80: + severity = 2 + + incidents.append({ + 'type': 'Check Point NDR Insight', + 'dbotMirrorId': id, + 'name': name, + 'severity': severity, + 'occurred': datetime.utcfromtimestamp(updated / 1000).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + 'updated': event['updated'], + 'details': description, + 'CustomFields': { + 'externalstarttime': datetime.utcfromtimestamp(event['from'] / 1000).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + 'externalendtime': datetime.utcfromtimestamp(event['to'] / 1000).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + 'externallink': link, + 'description': desc_i, + 'eventdescriptions': desc_e + }, + 'rawJSON': json.dumps(event) + }) + + incidents = sorted(incidents, key=lambda x: x['updated'])[:max_fetch] + last_time = datetime.fromtimestamp((incidents[-1]['updated'] if len(incidents) > 0 else startTS) / 1000).isoformat() + + demisto.debug(f"Made {len(incidents)} XSOAR incidents") + return incidents, last_time + + +def fetch_incidents(client: Client, last_run: dict[str, str], first_fetch: datetime, domain: str, max_fetch: int): + last_fetch = last_run.get('last_fetch', first_fetch.isoformat()) + last_fetch_time = dateparser.parse(last_fetch) + if not last_fetch_time: + raise Exception(f"Invalid last fetch time value '{last_fetch}'") + + startTS = int(last_fetch_time.timestamp() * 1000) + insights = client.get_insights(startTS, max_fetch) + incidents, last_insight_time = parse_insights(insights, domain, startTS, max_fetch) + + return {'last_fetch': last_insight_time}, incidents + + +def main() -> None: # pragma: no cover + params = demisto.params() + + base_url = params.get('url', "") + client_id = 
params.get('credentials', {}).get('identifier') + access_key = params.get('credentials', {}).get('password') + domain = params.get('domain', "") + verify = not params.get('insecure', False) + proxy = params.get('proxy', False) + max_fetch = int(params.get('max_fetch', 1000)) + + fetch_time = params.get('first_fetch', '3 days').strip() + first_fetch = dateparser.parse(fetch_time, settings={'TIMEZONE': 'UTC'}) + if not first_fetch: + raise Exception(f"Invalid first fetch time value '{fetch_time}', must be '