Fix memory leak and also detect failed memory allocation #1290

Workflow file for this run

# This workflow runs CodeQL against platform builds in the repository.
#
# Any platform that supports the `--codeql` parameter will be built and the
# results will be uploaded to GitHub Code Scanning.
#
# Important: This file only works with "platform" builds. "CI" builds are
# supported by the codeql.yml file.
#
# Note: This workflow only supports Windows because the CodeQL CLI has confirmed issues
# running against edk2-style codebases on Linux (only tested on Ubuntu). Therefore, this
# workflow is written only for Windows, but it could easily be adapted to run on Linux
# in the future if needed (e.g. swap "windows" for the agent OS variable value).
#
# For details about the Linux issue see: https://github.com/github/codeql-action/issues/1338
#
# NOTE: This file is automatically synchronized from Mu DevOps. Update the original file there
# instead of the file in this repo.
#
# - Mu DevOps Repo: https://github.com/microsoft/mu_devops
# - File Sync Settings: https://github.com/microsoft/mu_devops/blob/main/.sync/Files.yml
#
# Copyright (c) Microsoft Corporation.
# SPDX-License-Identifier: BSD-2-Clause-Patent
name: "CodeQL - Platform"
on:
push:
branches:
- main
- release/*
- dev/*
pull_request:
branches:
- main
- release/*
- dev/*
paths-ignore:
- '!**.c'
- '!**.h'
jobs:
gather_build_files:
name: Gather Platform Build Files
runs-on: ubuntu-latest
outputs:
platform_build_files: ${{ steps.generate_matrix.outputs.platform_build_files }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
cache: 'pip'
cache-dependency-path: 'pip-requirements.txt'
- name: Install/Upgrade pip Modules
run: pip install -r pip-requirements.txt --upgrade
- name: Generate Package Matrix
id: generate_matrix
shell: python
run: |
import os
import json
from pathlib import Path
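# Helper: run the build script with '--help' and report whether the given
# parameter (e.g. '--codeql') appears in its output.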
def supports_parameter(script_path: str, parameter: str):
import subprocess
try:
# Run the script with the --help parameter and capture the output
# Note: subprocess.run() failed in the GitHub workflow
process = subprocess.Popen(['python', script_path, '--help'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output, error = process.communicate()
if process.returncode != 0:
print(f"::error title=CodeQL Check Failed!::Failed to determine if the platform is CodeQL compatible! Return code {process.returncode}. {error}")
return False
return parameter in output.decode('utf-8')
except Exception as e:
print(f"::error title=CodeQL Check Exception!::Exception occurred while checking if the platform is CodeQL compatible! {e}")
return False
root_dir = Path(os.environ['GITHUB_WORKSPACE'])
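# Collect every directory in the workspace whose name ends in 'pkg' (case-insensitive) as a candidate package.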
packages = [d for d in root_dir.rglob('*') if d.is_dir() and d.name.strip().lower().endswith('pkg')]
platform_build_files = []
# This can be made more robust than just checking if a PlatformBuild.py
# file exists in the package directory, but the additional complexity is
# not needed right now given current package construction conventions.
for package in packages:
platform_build_file = package / "PlatformBuild.py"
if platform_build_file.exists() and platform_build_file.is_file() \
and any(file.endswith('.dsc') for file in os.listdir(package)) \
and supports_parameter(str(platform_build_file), "--codeql"):
platform_build_files.append(str(platform_build_file.relative_to(root_dir)))
platform_build_files.sort()
print(platform_build_files)
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'platform_build_files={json.dumps(platform_build_files)}', file=fh)
analyze:
name: Analyze
runs-on: windows-2022
needs:
- gather_build_files
permissions:
actions: read
contents: read
security-events: write
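# Run one job per discovered platform build file; fail-fast is disabled so one failing platform does not cancel the others.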
strategy:
fail-fast: false
matrix:
build_file: ${{ fromJson(needs.gather_build_files.outputs.platform_build_files) }}
include:
- tool_chain_tag: VS2022
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Install Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
cache: 'pip'
cache-dependency-path: 'pip-requirements.txt'
- name: Use Git Long Paths on Windows
if: runner.os == 'Windows'
shell: pwsh
run: |
git config --system core.longpaths true
- name: Install/Upgrade pip Modules
run: pip install -r pip-requirements.txt --upgrade requests
- name: Get Cargo Tool Details
id: get_cargo_tool_details
shell: python
run: |
import os
import requests
import sys
import time
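# Helper: retry the GitHub API request a few times before giving up, since individual requests can fail transiently on hosted runners.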
def get_response_with_retries(url, retries=5, wait_time=10):
for attempt in range(retries):
response = requests.get(url)
if response.status_code == 200:
return response
print(f"::warning title=GitHub API Access Error!::Attempt {attempt + 1} failed. Retrying in {wait_time} seconds...")
time.sleep(wait_time)
return response
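# cargo-make is pinned to a specific release tag; the resolved version also becomes the cache key.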
GITHUB_REPO = "sagiegurari/cargo-make"
api_url = f"https://api.github.com/repos/{GITHUB_REPO}/releases/tags/0.37.9"
response = get_response_with_retries(api_url)
if response.status_code == 200:
build_release_id = response.json()["id"]
else:
print("::error title=GitHub Release Error!::Failed to get cargo-make release ID!")
sys.exit(1)
api_url = f"https://api.github.com/repos/{GITHUB_REPO}/releases/{build_release_id}"
response = get_response_with_retries(api_url)
if response.status_code == 200:
latest_cargo_make_version = response.json()["tag_name"]
else:
print("::error title=GitHub Release Error!::Failed to get cargo-make!")
sys.exit(1)
cache_key = f'cargo-make-{latest_cargo_make_version}'
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'cargo_bin_path={os.path.join(os.environ["USERPROFILE"], ".cargo", "bin")}', file=fh)
print(f'cargo_make_cache_key={cache_key}', file=fh)
print(f'cargo_make_version={latest_cargo_make_version}', file=fh)
- name: Attempt to Load cargo-make From Cache
id: cargo_make_cache
uses: actions/cache@v4
with:
path: ${{ steps.get_cargo_tool_details.outputs.cargo_bin_path }}
key: ${{ steps.get_cargo_tool_details.outputs.cargo_make_cache_key }}
- name: Download cargo-make
if: steps.cargo_make_cache.outputs.cache-hit != 'true'
uses: robinraju/release-downloader@v1.11
with:
repository: 'sagiegurari/cargo-make'
tag: '${{ steps.get_cargo_tool_details.outputs.cargo_make_version }}'
fileName: 'cargo-make-v${{ steps.get_cargo_tool_details.outputs.cargo_make_version }}-x86_64-pc-windows-msvc.zip'
out-file-path: 'cargo-make-download'
token: ${{ secrets.GITHUB_TOKEN }}
- name: Extract cargo-make
if: steps.cargo_make_cache.outputs.cache-hit != 'true'
env:
CARGO_MAKE_VERSION: ${{ steps.get_cargo_tool_details.outputs.cargo_make_version }}
DEST_DIR: ${{ steps.get_cargo_tool_details.outputs.cargo_bin_path }}
shell: python
run: |
import os
import shutil
import zipfile
from pathlib import Path
DOWNLOAD_DIR = Path(os.environ["GITHUB_WORKSPACE"], "cargo-make-download")
ZIP_FILE_NAME = f"cargo-make-v{os.environ['CARGO_MAKE_VERSION']}-x86_64-pc-windows-msvc.zip"
ZIP_FILE_PATH = Path(DOWNLOAD_DIR, ZIP_FILE_NAME)
EXTRACT_DIR = Path(DOWNLOAD_DIR, "cargo-make-contents")
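# Extract the archive and copy only cargo-make.exe into the cargo bin directory
# (assumed to already be on the runner's PATH).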
with zipfile.ZipFile(ZIP_FILE_PATH, 'r') as zip_ref:
zip_ref.extractall(EXTRACT_DIR)
for extracted_file in EXTRACT_DIR.iterdir():
if extracted_file.name == "cargo-make.exe":
shutil.copy2(extracted_file, os.environ["DEST_DIR"])
break
- name: Rust Prep
run: rustup component add rust-src
- name: Get Platform Information
id: get_platform_info
env:
BUILD_FILE_PATH: ${{ matrix.build_file }}
shell: python
run: |
import importlib.util
import inspect
import os
import sys
from pathlib import Path
from edk2toolext.invocables.edk2_platform_build import BuildSettingsManager
from edk2toolext.invocables.edk2_ci_setup import CiSetupSettingsManager
from edk2toolext.invocables.edk2_setup import SetupSettingsManager
platform_build_file = Path(os.environ['BUILD_FILE_PATH'])
if not platform_build_file.is_file():
print(f"::error title=Invalid Build File!::Failed to find {str(platform_build_file)}!")
sys.exit(1)
# Load the module
module_name = 'platform_settings'
spec = importlib.util.spec_from_file_location(module_name, platform_build_file)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Get info from the platform build file
pkg_name = "UnknownPkg"
ci_setup_supported = False
setup_supported = False
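# Inspect the classes defined in the platform build module to determine which
# stuart settings managers (setup, CI setup, build) it implements.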
for name, obj in inspect.getmembers(module):
if inspect.isclass(obj):
if issubclass(obj, CiSetupSettingsManager):
ci_setup_supported = True
if issubclass(obj, SetupSettingsManager):
setup_supported = True
if issubclass(obj, BuildSettingsManager):
try:
pkg_name = obj().GetName()
except AttributeError:
print(f"::error title=Invalid Package name!::Failed to get package name in {str(platform_build_file)}!")
sys.exit(1)
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'ci_setup_supported={str(ci_setup_supported).lower()}', file=fh)
print(f'setup_supported={str(setup_supported).lower()}', file=fh)
print(f'pkg_name={pkg_name}', file=fh)
- name: Assign Temp Drive Letter
if: runner.os == 'Windows'
shell: pwsh
run: |
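# Map the workspace to a short Z: drive path (presumably to keep build paths under Windows path length limits).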
subst Z: ${{ github.workspace }}
- name: Setup
if: steps.get_platform_info.outputs.setup_supported == 'true'
shell: pwsh
working-directory: "Z:"
run: stuart_setup -c ${{ matrix.build_file }} -t DEBUG TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }}
- name: Upload Setup Log As An Artifact
uses: actions/upload-artifact@v4
if: (success() || failure()) && steps.get_platform_info.outputs.setup_supported == 'true'
with:
name: ${{ steps.get_platform_info.outputs.pkg_name }}-Setup-Log
path: |
**/SETUPLOG.txt
retention-days: 7
if-no-files-found: ignore
- name: CI Setup
if: steps.get_platform_info.outputs.ci_setup_supported == 'true'
shell: pwsh
working-directory: "Z:"
run: stuart_ci_setup -c ${{ matrix.build_file }} -t DEBUG TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }}
- name: Upload CI Setup Log As An Artifact
uses: actions/upload-artifact@v4
if: (success() || failure()) && steps.get_platform_info.outputs.ci_setup_supported == 'true'
with:
name: ${{ steps.get_platform_info.outputs.pkg_name }}-CI-Setup-Log
path: |
**/CISETUP.txt
retention-days: 7
if-no-files-found: ignore
- name: Update
shell: pwsh
working-directory: "Z:"
run: stuart_update -c ${{ matrix.build_file }} -t DEBUG TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }}
- name: Upload Update Log As An Artifact
uses: actions/upload-artifact@v4
if: success() || failure()
with:
name: ${{ steps.get_platform_info.outputs.pkg_name }}-Update-Log
path: |
**/UPDATE_LOG.txt
retention-days: 7
if-no-files-found: ignore
- name: Find CodeQL Plugin Directory
id: find_dir
shell: python
run: |
import os
import sys
from pathlib import Path
#
# Find the plugin directory that contains the CodeQL plugin.
#
# Prior to Mu Basecore 202311, the CodeQL plugin was located in .pytool; since then it
# has been located in BaseTools. Check BaseTools first, but fall back to .pytool for
# backward compatibility. The .pytool fallback can be removed once it is no longer
# needed by any supported branch.
#
plugin_dir = list(Path(os.environ['GITHUB_WORKSPACE']).rglob('BaseTools/Plugin/CodeQL'))
if not plugin_dir:
plugin_dir = list(Path(os.environ['GITHUB_WORKSPACE']).rglob('.pytool/Plugin/CodeQL'))
# This should only be found once
if len(plugin_dir) == 1:
plugin_dir = str(plugin_dir[0])
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'codeql_plugin_dir={plugin_dir}', file=fh)
else:
print("::error title=Workspace Error!::Failed to find Mu Basecore plugin directory!")
sys.exit(1)
- name: Get CodeQL CLI Cache Data
id: cache_key_gen
env:
CODEQL_PLUGIN_DIR: ${{ steps.find_dir.outputs.codeql_plugin_dir }}
shell: python
run: |
import os
import yaml
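# Read the CodeQL CLI external dependency descriptor shipped with the plugin; its name and
# version form the cache key, and '<name>_extdep' is the directory the CLI is expected to
# be unpacked into.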
codeql_cli_ext_dep_name = 'codeqlcli_windows_ext_dep'
codeql_plugin_file = os.path.join(os.environ['CODEQL_PLUGIN_DIR'], codeql_cli_ext_dep_name + '.yaml')
with open (codeql_plugin_file) as pf:
codeql_cli_ext_dep = yaml.safe_load(pf)
cache_key_name = codeql_cli_ext_dep['name']
cache_key_version = codeql_cli_ext_dep['version']
cache_key = f'{cache_key_name}-{cache_key_version}'
codeql_plugin_cli_ext_dep_dir = os.path.join(os.environ['CODEQL_PLUGIN_DIR'], codeql_cli_ext_dep['name'].strip() + '_extdep')
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'codeql_cli_cache_key={cache_key}', file=fh)
print(f'codeql_cli_ext_dep_dir={codeql_plugin_cli_ext_dep_dir}', file=fh)
- name: Attempt to Load CodeQL CLI From Cache
id: codeqlcli_cache
uses: actions/cache@v4
with:
path: ${{ steps.cache_key_gen.outputs.codeql_cli_ext_dep_dir }}
key: ${{ steps.cache_key_gen.outputs.codeql_cli_cache_key }}
- name: Download CodeQL CLI
if: steps.codeqlcli_cache.outputs.cache-hit != 'true'
shell: pwsh
working-directory: "Z:"
run: stuart_update -c ${{ matrix.build_file }} -t DEBUG TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }} --codeql
- name: Find pytool Plugin Directory
id: find_pytool_dir
shell: python
run: |
import os
import sys
from pathlib import Path
# Find the plugin directory that contains the Compiler plugin
plugin_dir = list(Path(os.environ['GITHUB_WORKSPACE']).rglob('.pytool/Plugin/CompilerPlugin'))
# This should only be found once
if len(plugin_dir) == 1:
# If the directory is found, get the parent Plugin directory
plugin_dir = str(plugin_dir[0].parent)
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'pytool_plugin_dir={plugin_dir}', file=fh)
else:
print("::error title=Workspace Error!::Failed to find Mu Basecore .pytool/Plugin directory!")
sys.exit(1)
- name: Remove CI Plugins Irrelevant to CodeQL
shell: python
env:
PYTOOL_PLUGIN_DIR: ${{ steps.find_pytool_dir.outputs.pytool_plugin_dir }}
run: |
import os
import shutil
from pathlib import Path
# Only these two plugins are needed for CodeQL.
#
# CodeQL build time is reduced by removing the other plugins in the .pytool directory that
# are not needed for the CodeQL build. The CompilerPlugin is required to compile the code
# that CodeQL extracts results from, and the CodeQL plugin is necessary to analyze those
# results and build the CodeQL database from them. The CodeQL plugin should be in BaseTools
# moving forward, but it might still be in .pytool in older branches, so it is kept here as
# an exception.
#
plugins_to_keep = ['CodeQL', 'CompilerPlugin']
plugin_dir = Path(os.environ['PYTOOL_PLUGIN_DIR']).absolute()
if plugin_dir.is_dir():
for plugin in plugin_dir.iterdir():
if str(plugin.stem) not in plugins_to_keep:
shutil.rmtree(str(plugin.absolute()), ignore_errors=True)
- name: Platform Build
shell: pwsh
working-directory: "Z:"
env:
RUST_ENV_CHECK_TOOL_EXCLUSIONS: "cargo fmt, cargo tarpaulin"
STUART_CODEQL_PATH: ${{ steps.cache_key_gen.outputs.codeql_cli_ext_dep_dir }}
run: stuart_build -c ${{ matrix.build_file }} TARGET=DEBUG TOOL_CHAIN_TAG=${{ matrix.tool_chain_tag }} --codeql
- name: Build Cleanup
id: build_cleanup
shell: python
run: |
import os
import shutil
from pathlib import Path
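# Recursively delete per-architecture output directories under Build (presumably to shrink
# what later steps have to scan and upload).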
dirs_to_delete = ['ia32', 'x64', 'arm', 'aarch64']
def delete_dirs(path: Path):
if path.exists() and path.is_dir():
if path.name.lower() in dirs_to_delete:
shutil.rmtree(path)
print(f'Removed {str(path)}')
return
for child_dir in path.iterdir():
delete_dirs(child_dir)
build_path = Path(os.environ['GITHUB_WORKSPACE'], 'Build')
delete_dirs(build_path)
- name: Upload Build Logs As An Artifact
uses: actions/upload-artifact@v4
if: success() || failure()
with:
name: ${{ steps.get_platform_info.outputs.pkg_name }}-Build-Logs
path: |
**/BUILD_REPORT.TXT
**/OVERRIDELOG.TXT
**/BUILDLOG_*.md
**/BUILDLOG_*.txt
**/CI_*.md
**/CI_*.txt
retention-days: 7
if-no-files-found: ignore
- name: Prepare Env Data for CodeQL Upload
id: env_data
env:
PACKAGE_NAME: ${{ steps.get_platform_info.outputs.pkg_name }}
shell: python
run: |
import os
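# Build the expected SARIF path produced by the CodeQL plugin:
# Build/codeql-analysis-<pkg>-debug/codeql-db-<pkg>-debug-0.sarif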
package = os.environ['PACKAGE_NAME'].strip().lower()
directory_name = 'codeql-analysis-' + package + '-debug'
file_name = 'codeql-db-' + package + '-debug-0.sarif'
sarif_path = os.path.join('Build', directory_name, file_name)
with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
print(f'sarif_file_path={sarif_path}', file=fh)
- name: Upload CodeQL Results (SARIF) As An Artifact
uses: actions/upload-artifact@v4
with:
name: ${{ steps.get_platform_info.outputs.pkg_name }}-CodeQL-SARIF
path: ${{ steps.env_data.outputs.sarif_file_path }}
retention-days: 14
if-no-files-found: warn
- name: Upload CodeQL Results (SARIF) To GitHub Code Scanning
uses: github/codeql-action/upload-sarif@v3
with:
# Path to SARIF file relative to the root of the repository.
sarif_file: ${{ steps.env_data.outputs.sarif_file_path }}
# Optional category for the results. Used to differentiate multiple results for one commit.
# Each package is a separate category.
category: ${{ steps.get_platform_info.outputs.pkg_name }}
- name: Remove Temp Drive Letter
if: runner.os == 'Windows'
shell: pwsh
run: |
subst Z: /D