diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 568d48e556b..9702a361e70 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -13,3 +13,8 @@ updates: labels: - "dependencies" - "pip" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + target-branch: master diff --git a/.github/labeler.yml b/.github/labeler.yml new file mode 100644 index 00000000000..d4df3619b36 --- /dev/null +++ b/.github/labeler.yml @@ -0,0 +1,27 @@ +documentation: + - changed-files: + - any-glob-to-any-file: "docs/**" + +provider/aws: + - changed-files: + - any-glob-to-any-file: "prowler/providers/aws/**" + - any-glob-to-any-file: "tests/providers/aws/**" + +provider/azure: + - changed-files: + - any-glob-to-any-file: "prowler/providers/azure/**" + - any-glob-to-any-file: "tests/providers/azure/**" + +provider/gcp: + - changed-files: + - any-glob-to-any-file: "prowler/providers/gcp/**" + - any-glob-to-any-file: "tests/providers/gcp/**" + +provider/kubernetes: + - changed-files: + - any-glob-to-any-file: "prowler/providers/kubernetes/**" + - any-glob-to-any-file: "tests/providers/kubernetes/**" + +github_actions: + - changed-files: + - any-glob-to-any-file: ".github/workflows/*" diff --git a/.github/workflows/build-documentation-on-pr.yml b/.github/workflows/build-documentation-on-pr.yml new file mode 100644 index 00000000000..015a3742d65 --- /dev/null +++ b/.github/workflows/build-documentation-on-pr.yml @@ -0,0 +1,24 @@ +name: Pull Request Documentation Link + +on: + pull_request: + branches: + - 'master' + - 'prowler-4.0-dev' + paths: + - 'docs/**' + +env: + PR_NUMBER: ${{ github.event.pull_request.number }} + +jobs: + documentation-link: + name: Documentation Link + runs-on: ubuntu-latest + steps: + - name: Leave PR comment with the SaaS Documentation URI + uses: peter-evans/create-or-update-comment@v4 + with: + issue-number: ${{ env.PR_NUMBER }} + body: | + You can check the documentation for this PR here -> [SaaS Documentation](https://prowler-prowler-docs--${{ env.PR_NUMBER }}.com.readthedocs.build/projects/prowler-open-source/en/${{ env.PR_NUMBER }}/) diff --git a/.github/workflows/build-lint-push-containers.yml b/.github/workflows/build-lint-push-containers.yml index 2e8ec437504..43fd281c13a 100644 --- a/.github/workflows/build-lint-push-containers.yml +++ b/.github/workflows/build-lint-push-containers.yml @@ -32,11 +32,11 @@ jobs: POETRY_VIRTUALENVS_CREATE: "false" steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup python (release) if: github.event_name == 'release' - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} @@ -52,13 +52,13 @@ jobs: poetry version ${{ github.event.release.tag_name }} - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to Public ECR - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: public.ecr.aws username: ${{ secrets.PUBLIC_ECR_AWS_ACCESS_KEY_ID }} @@ -67,11 +67,11 @@ jobs: AWS_REGION: ${{ env.AWS_REGION }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build and push container image (latest) if: github.event_name == 'push' - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 with: push: true tags: | @@ -83,7 +83,7 @@ jobs: - name: Build and push container image 
(release) if: github.event_name == 'release' - uses: docker/build-push-action@v2 + uses: docker/build-push-action@v5 with: # Use local context to get changes # https://github.com/docker/build-push-action#path-context diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index b02607fe38e..a788c10cbbb 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -37,11 +37,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -52,6 +52,6 @@ jobs: # queries: security-extended,security-and-quality - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/find-secrets.yml b/.github/workflows/find-secrets.yml index f5166d3e8b9..6428ee0a0ad 100644 --- a/.github/workflows/find-secrets.yml +++ b/.github/workflows/find-secrets.yml @@ -7,11 +7,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: TruffleHog OSS - uses: trufflesecurity/trufflehog@v3.4.4 + uses: trufflesecurity/trufflehog@v3.68.2 with: path: ./ base: ${{ github.event.repository.default_branch }} diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 00000000000..25d6135bc47 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,16 @@ +name: "Pull Request Labeler" + +on: + pull_request_target: + branches: + - "master" + - "prowler-4.0-dev" + +jobs: + labeler: + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: actions/labeler@v5 diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index 6353157b66d..a884abc3a26 100644 --- a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -14,13 +14,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Test if changes are in not ignored paths id: are-non-ignored-files-changed - uses: tj-actions/changed-files@v39 + uses: tj-actions/changed-files@v42 with: files: ./** files_ignore: | @@ -28,6 +28,7 @@ jobs: README.md docs/** permissions/** + mkdocs.yml - name: Install poetry if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' run: | @@ -35,7 +36,7 @@ jobs: pipx install poetry - name: Set up Python ${{ matrix.python-version }} if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} cache: "poetry" @@ -87,6 +88,6 @@ jobs: poetry run pytest -n auto --cov=./prowler --cov-report=xml tests - name: Upload coverage reports to Codecov if: steps.are-non-ignored-files-changed.outputs.any_changed == 'true' - uses: codecov/codecov-action@v3 + uses: codecov/codecov-action@v4 env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 59b8af68de7..53289f1c2ea 100644 --- a/.github/workflows/pypi-release.yml 
+++ b/.github/workflows/pypi-release.yml @@ -16,7 +16,7 @@ jobs: name: Release Prowler to PyPI steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ env.GITHUB_BRANCH }} - name: Install dependencies @@ -24,7 +24,7 @@ jobs: pipx install poetry pipx inject poetry poetry-bumpversion - name: setup python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 cache: 'poetry' @@ -44,7 +44,7 @@ jobs: poetry publish # Create pull request with new version - name: Create Pull Request - uses: peter-evans/create-pull-request@v4 + uses: peter-evans/create-pull-request@v6 with: token: ${{ secrets.PROWLER_ACCESS_TOKEN }} commit-message: "chore(release): update Prowler Version to ${{ env.RELEASE_TAG }}." diff --git a/.github/workflows/refresh_aws_services_regions.yml b/.github/workflows/refresh_aws_services_regions.yml index e22bedc3334..fea14d3bce6 100644 --- a/.github/workflows/refresh_aws_services_regions.yml +++ b/.github/workflows/refresh_aws_services_regions.yml @@ -23,12 +23,12 @@ jobs: # Steps represent a sequence of tasks that will be executed as part of the job steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: ${{ env.GITHUB_BRANCH }} - name: setup python - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: 3.9 #install the python needed @@ -38,7 +38,7 @@ jobs: pip install boto3 - name: Configure AWS Credentials -- DEV - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@v4 with: aws-region: ${{ env.AWS_REGION_DEV }} role-to-assume: ${{ secrets.DEV_IAM_ROLE_ARN }} @@ -50,12 +50,12 @@ jobs: # Create pull request - name: Create Pull Request - uses: peter-evans/create-pull-request@v4 + uses: peter-evans/create-pull-request@v6 with: token: ${{ secrets.PROWLER_ACCESS_TOKEN }} commit-message: "feat(regions_update): Update regions for AWS services." branch: "aws-services-regions-updated-${{ github.sha }}" - labels: "status/waiting-for-revision, severity/low" + labels: "status/waiting-for-revision, severity/low, provider/aws" title: "chore(regions_update): Changes in regions for AWS services." 
body: | ### Description diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9c01e1d92ef..d58fc12bb1d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ repos: ## GENERAL - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: check-merge-conflict - id: check-yaml @@ -15,7 +15,7 @@ repos: ## TOML - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks - rev: v2.10.0 + rev: v2.12.0 hooks: - id: pretty-format-toml args: [--autofix] @@ -28,7 +28,7 @@ repos: - id: shellcheck ## PYTHON - repo: https://github.com/myint/autoflake - rev: v2.2.0 + rev: v2.2.1 hooks: - id: autoflake args: @@ -39,25 +39,25 @@ repos: ] - repo: https://github.com/timothycrosley/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort args: ["--profile", "black"] - repo: https://github.com/psf/black - rev: 22.12.0 + rev: 24.1.1 hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 + rev: 7.0.0 hooks: - id: flake8 exclude: contrib args: ["--ignore=E266,W503,E203,E501,W605"] - repo: https://github.com/python-poetry/poetry - rev: 1.6.0 # add version here + rev: 1.7.0 hooks: - id: poetry-check - id: poetry-lock @@ -80,18 +80,12 @@ repos: - id: trufflehog name: TruffleHog description: Detect secrets in your data. - # entry: bash -c 'trufflehog git file://. --only-verified --fail' + entry: bash -c 'trufflehog --no-update git file://. --only-verified --fail' # For running trufflehog in docker, use the following entry instead: - entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail' + # entry: bash -c 'docker run -v "$(pwd):/workdir" -i --rm trufflesecurity/trufflehog:latest git file:///workdir --only-verified --fail' language: system stages: ["commit", "push"] - - id: pytest-check - name: pytest-check - entry: bash -c 'pytest tests -n auto' - language: system - files: '.*\.py' - - id: bandit name: bandit description: "Bandit is a tool for finding common security issues in Python code" diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 557613e35f5..17d338d2e97 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -8,16 +8,18 @@ version: 2 build: os: "ubuntu-22.04" tools: - python: "3.9" + python: "3.11" jobs: post_create_environment: # Install poetry # https://python-poetry.org/docs/#installing-manually - - pip install poetry - # Tell poetry to not use a virtual environment - - poetry config virtualenvs.create false + - python -m pip install poetry post_install: - - poetry install -E docs + # Install dependencies with 'docs' dependency group + # https://python-poetry.org/docs/managing-dependencies/#dependency-groups + # VIRTUAL_ENV needs to be set manually for now. + # See https://github.com/readthedocs/readthedocs.org/pull/11152/ + - VIRTUAL_ENV=${READTHEDOCS_VIRTUALENV_PATH} python -m poetry install --only=docs mkdocs: configuration: mkdocs.yml diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 419b6d76be5..c4bba21640d 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -55,7 +55,7 @@ further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at community@prowler.cloud. All +reported by contacting the project team at [support.prowler.com](https://customer.support.prowler.com/servicedesk/customer/portals). 
All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. diff --git a/README.md b/README.md index 82ab6fe4eeb..43c6f53dae5 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,31 @@
- See all the things you and your team can do with ProwlerPro at prowler.pro
+ Prowler SaaS and Prowler Open Source are as dynamic and adaptable as the environment they’re meant to protect. Trusted by the leaders in security.
+Learn more at prowler.com
+Join our Prowler community!
{ \"Sid\": \"Allow CloudTrail access\", \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"cloudtrail.amazonaws.com\" }, \"Action\": \"kms:DescribeKey\", \"Resource\": \"*\" } ``` 2\\. Granting encrypt permissions ```{ \"Sid\": \"Allow CloudTrail to encrypt logs\", \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"cloudtrail.amazonaws.com\" }, \"Action\": \"kms:GenerateDataKey*\", \"Resource\": \"*\", \"Condition\": { \"StringLike\": { \"kms:EncryptionContext:aws:cloudtrail:arn\": [ \"arn:aws:cloudtrail:*:aws-account-id:trail/*\" ] } } } ``` 3\\. Granting decrypt permissions ```{ \"Sid\": \"Enable CloudTrail log decrypt permissions\", \"Effect\": \"Allow\", \"Principal\": { \"AWS\": \"arn:aws:iam::aws-account-id:user/username\" }, \"Action\": \"kms:Decrypt\", \"Resource\": \"*\", \"Condition\": { \"Null\": { \"kms:EncryptionContext:aws:cloudtrail:arn\": \"false\" } } } ```", + "References": "https://docs.aws.amazon.com/awscloudtrail/latest/userguide/encrypting-cloudtrail-log-files-with-aws-kms.html:https://docs.aws.amazon.com/kms/latest/developerguide/create-keys.html" + } + ] + }, + { + "Id": "3.6", + "Description": "Ensure rotation for customer created symmetric CMKs is enabled", + "Checks": [ + "kms_cmk_rotation_enabled" + ], + "Attributes": [ + { + "Section": "3. Logging", + "Profile": "Level 2", + "AssessmentStatus": "Automated", + "Description": "AWS Key Management Service (KMS) allows customers to rotate the backing key which is key material stored within the KMS which is tied to the key ID of the Customer Created customer master key (CMK). It is the backing key that is used to perform cryptographic operations such as encryption and decryption. Automated key rotation currently retains all prior backing keys so that decryption of encrypted data can take place transparently. It is recommended that CMK key rotation be enabled for symmetric keys. Key rotation can not be enabled for any asymmetric CMK.", + "RationaleStatement": "Rotating encryption keys helps reduce the potential impact of a compromised key as data encrypted with a new key cannot be accessed with a previous key that may have been exposed. Keys should be rotated every year, or upon event that would result in the compromise of that key.", + "ImpactStatement": "Creation, management, and storage of CMKs may require additional time from and administrator.", + "RemediationProcedure": "**From Console:** 1. Sign in to the AWS Management Console and open the KMS console at: https://console.aws.amazon.com/kms. 2. In the left navigation pane, click Customer-managed keys. 3. Select a key where Key spec = SYMMETRIC_DEFAULT that does not have automatic rotation enabled. 4. Select the Key rotation tab. 5. Check the Automatically rotate this KMS key every year checkbox. 6. Click Save. 7. Repeat steps 3–6 for all customer-managed CMKs that do not have automatic rotation enabled.", + "AuditProcedure": "**From Console:** 1. Sign in to the AWS Management Console and open the KMS console at: https://console.aws.amazon.com/kms. 2. In the left navigation pane, click Customer-managed keys. 3. Select a customer managed CMK where Key spec = SYMMETRIC_DEFAULT. 4. Select the Key rotation tab. 5. Ensure the Automatically rotate this KMS key every year checkbox is checked. 6. 
Repeat steps 3–5 for all customer-managed CMKs where 'Key spec = SYMMETRIC_DEFAULT'.", + "AdditionalInformation": "", + "References": "https://aws.amazon.com/kms/pricing/:https://csrc.nist.gov/publications/detail/sp/800-57-part-1/rev-5/final" + } + ] + }, + { + "Id": "3.7", + "Description": "Ensure VPC flow logging is enabled in all VPCs", + "Checks": [ + "vpc_flow_logs_enabled" + ], + "Attributes": [ + { + "Section": "3. Logging", + "Profile": "Level 2", + "AssessmentStatus": "Automated", + "Description": "VPC Flow Logs is a feature that enables you to capture information about the IP traffic going to and from network interfaces in your VPC. After you've created a flow log, you can view and retrieve its data in Amazon CloudWatch Logs. It is recommended that VPC Flow Logs be enabled for packet \"Rejects\" for VPCs.", + "RationaleStatement": "VPC Flow Logs provide visibility into network traffic that traverses the VPC and can be used to detect anomalous traffic or provide insight during security workflows.", + "ImpactStatement": "By default, CloudWatch Logs will store Logs indefinitely unless a specific retention period is defined for the log group. When choosing the number of days to retain, keep in mind the average days it takes an organization to realize they have been breached is 210 days (at the time of this writing). Since additional time is required to research a breach, a minimum 365 day retention policy allows time for detection and research. You may also wish to archive the logs to a cheaper storage service rather than simply deleting them. See the following AWS resource to manage CloudWatch Logs retention periods: 1. https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/SettingLogRetention.html", + "RemediationProcedure": "Perform the following to enable VPC Flow Logs: **From Console:** 1. Sign in to the management console 2. Select `Services` then `VPC` 3. In the left navigation pane, select `Your VPCs` 4. Select a VPC 5. In the right pane, select the `Flow Logs` tab. 6. If no Flow Log exists, click `Create Flow Log` 7. For Filter, select `Reject` 8. Enter a `Role` and `Destination Log Group` 9. Click `Create Flow Log` 10. Click on `CloudWatch Logs Group` **Note:** Setting the filter to \"Reject\" will dramatically reduce the logging data accumulation for this recommendation and provide sufficient information for the purposes of breach detection, research and remediation. However, during periods of least privilege security group engineering, setting the filter to \"All\" can be very helpful in discovering existing traffic flows required for proper operation of an already running environment. **From Command Line:** 1. Create a policy document and name it as `role_policy_document.json` and paste the following content: ``` { \"Version\": \"2012-10-17\", \"Statement\": [ { \"Sid\": \"test\", \"Effect\": \"Allow\", \"Principal\": { \"Service\": \"ec2.amazonaws.com\" }, \"Action\": \"sts:AssumeRole\" } ] } ``` 2. Create another policy document and name it as `iam_policy.json` and paste the following content: ``` { \"Version\": \"2012-10-17\", \"Statement\": [ { \"Effect\": \"Allow\", \"Action\":[ \"logs:CreateLogGroup\", \"logs:CreateLogStream\", \"logs:DescribeLogGroups\", \"logs:DescribeLogStreams\", \"logs:PutLogEvents\", \"logs:GetLogEvents\", \"logs:FilterLogEvents\" ], \"Resource\": \"*\" } ] } ``` 3. Run the below command to create an IAM role: ``` aws iam create-role --role-name --assume-role-policy-document file://role_policy_document.json ``` 4.
Run the below command to create an IAM policy: ``` aws iam create-policy --policy-name --policy-document file://iam_policy.json ``` 5. Run the `attach-role-policy` command using the IAM policy ARN returned at the previous step to attach the policy to the IAM role (if the command succeeds, no output is returned): ``` aws iam attach-role-policy --policy-arn arn:aws:iam:: :policy/ --role-name ``` 6. Run `describe-vpcs` to get the VpcId available in the selected region: ``` aws ec2 describe-vpcs --region ``` 7. The command output should return the VPC Id available in the selected region. 8. Run `create-flow-logs` to create a flow log for the VPC: ``` aws ec2 create-flow-logs --resource-type VPC --resource-ids
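
As a rough, self-contained sketch of the command-line steps above (the role, policy, log group, account ID, and VPC ID values are hypothetical placeholders, not values from the benchmark text), the flow-log setup might look like:

```
# Hypothetical names/IDs -- replace with your own before running.
ROLE_NAME="vpc-flow-logs-role"
POLICY_NAME="vpc-flow-logs-policy"
ACCOUNT_ID="111122223333"
VPC_ID="vpc-0abc1234def567890"
LOG_GROUP="vpc-flow-logs"

# Role assumed by the flow-logs delivery, plus the CloudWatch Logs permissions policy
# (the two JSON documents are the ones created in steps 1 and 2 above).
aws iam create-role --role-name "$ROLE_NAME" \
  --assume-role-policy-document file://role_policy_document.json
aws iam create-policy --policy-name "$POLICY_NAME" \
  --policy-document file://iam_policy.json
aws iam attach-role-policy --role-name "$ROLE_NAME" \
  --policy-arn "arn:aws:iam::${ACCOUNT_ID}:policy/${POLICY_NAME}"

# Destination log group and the flow log itself, filtered to rejected packets
# as the recommendation suggests.
aws logs create-log-group --log-group-name "$LOG_GROUP"
aws ec2 create-flow-logs --resource-type VPC --resource-ids "$VPC_ID" \
  --traffic-type REJECT --log-group-name "$LOG_GROUP" \
  --deliver-logs-permission-arn "arn:aws:iam::${ACCOUNT_ID}:role/${ROLE_NAME}"
```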