diff --git a/analyzers/bandit/.deepsource/analyzer/analyzer.toml b/analyzers/bandit/.deepsource/analyzer/analyzer.toml
new file mode 100644
index 0000000..7ef1d3b
--- /dev/null
+++ b/analyzers/bandit/.deepsource/analyzer/analyzer.toml
@@ -0,0 +1,7 @@
+# Analyzer Profile
+category = "sec"
+name = "A security linter from PyCQA"
+shortcode = "bandit"
+status = "active"
+tool_latest_version = "1.7.8"
+description = "Bandit is a tool designed to find common security issues in Python code."
diff --git a/analyzers/bandit/.deepsource/analyzer/example_config.toml b/analyzers/bandit/.deepsource/analyzer/example_config.toml
new file mode 100644
index 0000000..fe8dce4
--- /dev/null
+++ b/analyzers/bandit/.deepsource/analyzer/example_config.toml
@@ -0,0 +1,5 @@
+version = 1
+
+[[analyzers]]
+name = "bandit"
+type = "community"
diff --git a/analyzers/bandit/.deepsource/analyzer/logo.svg b/analyzers/bandit/.deepsource/analyzer/logo.svg
new file mode 100644
index 0000000..0204d32
--- /dev/null
+++ b/analyzers/bandit/.deepsource/analyzer/logo.svg
@@ -0,0 +1,51 @@
+<!-- bandit logo: 51 lines of SVG markup omitted -->
diff --git a/analyzers/bandit/.deepsource/analyzer/silencers.json b/analyzers/bandit/.deepsource/analyzer/silencers.json
new file mode 100644
index 0000000..e69de29
diff --git a/analyzers/bandit/CI/circleci.yaml b/analyzers/bandit/CI/circleci.yaml
new file mode 100644
index 0000000..071be27
--- /dev/null
+++ b/analyzers/bandit/CI/circleci.yaml
@@ -0,0 +1,29 @@
+version: 2.1
+
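+# The DeepSource CLI reads the repository DSN from the DEEPSOURCE_DSN environment
+# variable; set it as a project environment variable in the CircleCI project settings.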
+jobs:
+ run-bandit:
+ docker:
+ - image: cimg/python:3.12
+ steps:
+ - checkout
+ - run:
+ name: Install bandit
+ command: |
+ python -m pip install --upgrade pip
+            # The `sarif` extra provides bandit's SARIF formatter, needed for `-f sarif`
+            pip install 'bandit[sarif]'
+ - run:
+ name: Run bandit
+ command: |
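+            # bandit exits non-zero when it finds issues; `|| true` keeps the step
+            # green so the SARIF report is still uploaded to DeepSource.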
+ bandit -r . -f sarif -o bandit.sarif || true
+ - run:
+ name: Upload SARIF report to DeepSource
+ command: |
+ # Install the DeepSource CLI
+ curl https://deepsource.io/cli | sh
+ # Send the report to DeepSource
+ ./bin/deepsource report --analyzer bandit --analyzer-type community --value-file ./bandit.sarif
+
+workflows:
+ run-bandit-workflow:
+ jobs:
+ - run-bandit
\ No newline at end of file
diff --git a/analyzers/bandit/CI/github-actions.yaml b/analyzers/bandit/CI/github-actions.yaml
new file mode 100644
index 0000000..d26ebeb
--- /dev/null
+++ b/analyzers/bandit/CI/github-actions.yaml
@@ -0,0 +1,42 @@
+# A copy-paste GitHub Actions config to run bandit and report the artifact to DeepSource
+name: Scan with bandit
+
+on:
+  # Note: keep both the `push` and `pull_request` triggers so that bandit SARIF reports
+  # reach DeepSource consistently for both default-branch pushes and pull requests.
+ push:
+ branches: [ main, master ]
+ pull_request:
+
+jobs:
+ scan:
+ runs-on: ubuntu-latest
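+    # The DeepSource CLI needs the repository DSN; add DEEPSOURCE_DSN as a secret in
+    # the repository settings so the upload step below can authenticate.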
+ env:
+ DEEPSOURCE_DSN: ${{ secrets.DEEPSOURCE_DSN }}
+ steps:
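+      # For pull requests, check out the head commit rather than the merge commit so
+      # the uploaded report is attributed to the commit DeepSource expects.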
+ - uses: actions/checkout@v3
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+
+      - name: Install bandit
+        # The `sarif` extra provides bandit's SARIF formatter, needed for `-f sarif`
+        run: pip install 'bandit[sarif]'
+
+ - name: Generate the SARIF report
+ id: bandit-action-scan
+ run: |
+ bandit -r . -f sarif -o bandit.sarif
+ continue-on-error: true
+
+ - name: Upload SARIF report files to DeepSource
+ run: |
+ # Install the CLI
+ curl https://deepsource.io/cli | sh
+
+ # Send the report to DeepSource
+ ./bin/deepsource report --analyzer bandit --analyzer-type community --value-file ./bandit.sarif
+
+ - name: Verify bandit-action succeeded
+ shell: bash
+ run: |
+ echo "If this step fails, bandit found issues. Check the output of the scan step above."
+ [[ "${{ steps.bandit-action-scan.outcome }}" == "success" ]]
\ No newline at end of file
diff --git a/analyzers/bandit/CI/gitlab-ci.yaml b/analyzers/bandit/CI/gitlab-ci.yaml
new file mode 100644
index 0000000..771815c
--- /dev/null
+++ b/analyzers/bandit/CI/gitlab-ci.yaml
@@ -0,0 +1,14 @@
+stages:
+ - scan
+
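+# The DeepSource CLI authenticates with the DEEPSOURCE_DSN environment variable;
+# define it as a masked CI/CD variable in the GitLab project settings.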
+scan:
+ stage: scan
+ image: python:latest # Using Python image because bandit is a Python package
+ before_script:
+    - pip install 'bandit[sarif]' # Install bandit with its SARIF formatter
+ script:
+ - bandit -r . -f sarif -o bandit.sarif || true # Run bandit and generate SARIF report
+ - curl -sSL https://deepsource.io/cli | sh # Install DeepSource CLI
+ - ./bin/deepsource report --analyzer bandit --analyzer-type community --value-file ./bandit.sarif # Upload SARIF report to DeepSource
+ rules:
+ - if: $CI_PIPELINE_SOURCE == "push"
diff --git a/analyzers/bandit/utils/issue_gen.py b/analyzers/bandit/utils/issue_gen.py
new file mode 100644
index 0000000..506ba3f
--- /dev/null
+++ b/analyzers/bandit/utils/issue_gen.py
@@ -0,0 +1,178 @@
+# To run: python issue_gen.py --root_directory=
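+# Walks a directory of rule definitions, extracts each rule's metadata
+# (id, shortdesc, description, source_url, tags) from its class attributes via the
+# AST, and writes one DeepSource issue definition per rule to ./issues/BANDIT-<id>.toml.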
+import argparse
+import ast
+import json
+import os
+from pathlib import Path
+from textwrap import dedent
+from typing import Dict, List, Optional, Union
+from urllib.parse import unquote, urlparse
+
+
+def concat_binop(binop: ast.AST) -> str:
+ """
+ Recursively concatenate binary operation nodes into a single string.
+
+ Args:
+ binop (ast.AST): Binary operation node.
+
+ Returns:
+ str: Concatenated string.
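+
+    Example:
+        For the AST of ``"Hello, " + "world"``, the concatenated result is
+        ``"Hello, world"``.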
+ """
+ if isinstance(binop, ast.BinOp):
+ return concat_binop(binop.left) + concat_binop(binop.right)
+ if isinstance(binop, ast.Constant):
+ return binop.value
+ return ""
+
+
+def extract_class_attributes(node: ast.ClassDef) -> Dict[str, Union[str, List[str]]]:
+ """
+ Extract class attributes from a ClassDef node in an abstract syntax tree.
+
+ Args:
+ node (ast.ClassDef): ClassDef node.
+
+ Returns:
+ Dict[str, Union[str, List[str]]]: Extracted class attributes.
+ """
+ class_data = {}
+ for item in node.body:
+ if isinstance(item, ast.Assign):
+            for target in item.targets:
+                # Only plain name assignments (e.g. `id = "..."`) carry rule metadata.
+                if not isinstance(target, ast.Name):
+                    continue
+                if target.id in ["id", "shortdesc", "description", "source_url"]:
+ class_data[target.id] = concat_binop(item.value)
+ elif target.id == "tags":
+ class_data["tags"] = [
+ concat_binop(element) for element in item.value.elts
+ ]
+ return class_data
+
+
+def extract_attributes_from_code(code: str) -> Dict[str, Union[str, List[str]]]:
+ """
+ Extract attributes from Python code.
+
+ Args:
+ code (str): Python code as a string.
+
+ Returns:
+ Dict[str, Union[str, List[str]]]: Extracted attributes.
+ """
+ class_data = {}
+ tree = ast.parse(code)
+ for node in ast.walk(tree):
+ if isinstance(node, ast.ClassDef):
+ class_data.update(extract_class_attributes(node))
+ return class_data
+
+
+def extract_page_name(url: str) -> Optional[str]:
+ """
+ Extract the page name from a URL.
+
+ Args:
+ url (str): Input URL.
+
+ Returns:
+ Optional[str]: Extracted page name or None if not found.
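+
+    Example:
+        ``https://example.com/docs/some-check.html`` becomes ``"Some Check"``.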
+ """
+ parsed_url = urlparse(url)
+ path_segments = parsed_url.path.strip("/").split("/")
+ if path_segments:
+ last_segment = os.path.splitext(path_segments[-1])[0]
+ page_name = unquote(last_segment.replace("-", " ")).title()
+ return page_name.replace("Cfn", "CloudFormation").replace(
+ "Cloudformation", "CloudFormation"
+ )
+ return None
+
+
+def build_toml(issue: Dict[str, Union[str, List[str]]]) -> str:
+ """
+ Build a TOML string from issue data.
+
+ Args:
+ issue (Dict[str, Union[str, List[str]]]): Issue data.
+
+ Returns:
+ str: TOML string.
+ """
+ title = issue["shortdesc"]
+ description = issue["description"]
+ source_url = issue.get("source_url", "")
+ tags = issue.get("tags", [])
+
+ content = f"""\
+ title = {json.dumps(title.capitalize())}
+ verbose_name = "{issue["id"]}"
+ severity = "major"
+ category = "antipattern"
+ weight = 70
+ tags = [{", ".join([json.dumps(tag) for tag in tags])}]
+ description = '''
+ {description}
+
+ ### References:
+ [{extract_page_name(source_url)}]({source_url})
+ '''
+ """
+ return dedent(content)
+
+
+def write_to_file(issue: Dict[str, Union[str, List[str]]]) -> None:
+ """
+ Write issue data to a TOML file.
+
+ Args:
+ issue (Dict[str, Union[str, List[str]]]): Issue data.
+ """
+    os.makedirs("./issues", exist_ok=True)  # make sure the output directory exists
+    file_name = f"./issues/BANDIT-{issue['id']}.toml"
+ with open(file_name, "w") as file:
+ file.write(build_toml(issue))
+
+
+def extract_attributes_from_directory(
+ directory: str,
+) -> List[Dict[str, Union[str, List[str]]]]:
+ """
+ Extract attributes from Python files in a directory.
+
+ Args:
+ directory (str): Root directory to search for Python files.
+
+ Returns:
+ List[Dict[str, Union[str, List[str]]]]: List of extracted attributes.
+ """
+ all_classes_data = []
+ for root, _, files in os.walk(directory):
+ for file in files:
+ if file.endswith(".py"):
+ file_path = os.path.join(root, file)
+ with open(file_path, "r", encoding="utf-8") as f:
+ try:
+ code = f.read()
+ class_data = extract_attributes_from_code(code)
+ if class_data:
+ all_classes_data.append(class_data)
+ except Exception as e:
+ print(f"Error parsing file {file}: {e}")
+ return all_classes_data
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Extract attributes from Python files in a given directory."
+ )
+ parser.add_argument(
+ "--root_directory", help="Root directory of the bandit repository."
+ )
+ args = parser.parse_args()
+
+ base = Path(args.root_directory)
+ path = Path("bandit/src/bandit/rules")
+ rules_directory = base / path
+
+ attributes_list = extract_attributes_from_directory(rules_directory)
+ for attributes in attributes_list:
+ write_to_file(attributes)
diff --git a/analyzers/bandit/utils/issue_map_gen.py b/analyzers/bandit/utils/issue_map_gen.py
new file mode 100644
index 0000000..bc91075
--- /dev/null
+++ b/analyzers/bandit/utils/issue_map_gen.py
@@ -0,0 +1,43 @@
+# To run: python issue_map_gen.py <directory containing the BANDIT-*.toml issue files>
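+# Example: an issue file named BANDIT-B101.toml yields the entry
+#   "B101": {"issue_code": "BANDIT-B101"}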
+import json
+import os
+import sys
+from typing import Dict
+
+
+def generate_json(directory: str) -> None:
+ """
+ Generate a JSON file containing mappings of issue codes extracted from TOML files in a directory.
+
+ :param directory: The directory containing TOML files with issue codes.
+ :type directory: str
+ :return: None
+ """
+ # Dictionary to hold the mappings
+ issue_codes: Dict[str, dict] = {}
+
+ # Iterate through each file in the directory
+ for filename in os.listdir(directory):
+ if filename.endswith(".toml") and filename.startswith("BANDIT-"):
+            # Extract the issue code (the part between 'BANDIT-' and '.toml')
+ issue_code = filename[len("BANDIT-") : -len(".toml")]
+
+ # Add to the dictionary
+ issue_codes[issue_code] = {"issue_code": f"BANDIT-{issue_code}"}
+
+ # Convert dictionary to JSON
+ json_data: str = json.dumps(issue_codes, indent=4)
+
+ # Write JSON data to a file in the current directory
+ with open("issue_map.json", "w") as file:
+ file.write(json_data)
+ print("JSON data written to issue_map.json")
+
+
+if __name__ == "__main__":
+ if len(sys.argv) != 2:
+        print("Usage: python issue_map_gen.py <issues_directory>")
+ sys.exit(1)
+
+ target: str = sys.argv[1]
+ generate_json(target)