From 329050f67d35eadac29e0ff0805a479924e4aff3 Mon Sep 17 00:00:00 2001
From: Noy-Maimon <72340690+Noy-Maimon@users.noreply.github.com>
Date: Wed, 18 May 2022 20:46:25 +0300
Subject: [PATCH] Missing dependencies fix (#19052)

* ignore dependencies of packs from other MP
* test pack
* fix dependencies
* fix random dependencies
* debug logs
* change
* test change
* remove failing pack
* d
* test change
* remove testing
* remove testing files
* add removed pack
* add removed pack
* lint
---
 Packs/CommonPlaybooks/pack_metadata.json      |  4 -
 Packs/CreatePlbkDoc/pack_metadata.json        |  2 +-
 .../pack_metadata.json                        |  2 +-
 .../pack_metadata.json                        |  4 -
 .../pack_metadata.json                        |  2 +-
 Packs/PANOSPolicyOptimizer/pack_metadata.json |  4 -
 Packs/PANOStoCDLMonitoring/pack_metadata.json |  2 +-
 Packs/PcapAnalysis/pack_metadata.json         |  4 -
 Packs/PhishingURL/pack_metadata.json          |  2 +-
 Packs/PrismaCloud/pack_metadata.json          | 16 ----
 Packs/TIM_Processing/pack_metadata.json       |  4 -
 Packs/Wiz/pack_metadata.json                  |  2 +-
 Tests/Marketplace/marketplace_services.py     | 17 +++-
 Tests/Marketplace/upload_packs.py             | 86 ++++++++-----------
 Utils/trigger_test_upload_flow.sh             | 22 ++---
 15 files changed, 69 insertions(+), 104 deletions(-)

diff --git a/Packs/CommonPlaybooks/pack_metadata.json b/Packs/CommonPlaybooks/pack_metadata.json
index 5ffe69aba76d..3afce6288bf3 100644
--- a/Packs/CommonPlaybooks/pack_metadata.json
+++ b/Packs/CommonPlaybooks/pack_metadata.json
@@ -32,10 +32,6 @@
             "mandatory": false,
             "display_name": "McAfee ePO"
         },
-        "McAfee ePO v2": {
-            "mandatory": false,
-            "display_name": "McAfee ePO v2"
-        },
         "CheckpointFirewall": {
             "mandatory": false,
             "display_name": "Check Point Firewall"
diff --git a/Packs/CreatePlbkDoc/pack_metadata.json b/Packs/CreatePlbkDoc/pack_metadata.json
index ad10550e1f2c..6c82a91c4c19 100644
--- a/Packs/CreatePlbkDoc/pack_metadata.json
+++ b/Packs/CreatePlbkDoc/pack_metadata.json
@@ -15,7 +15,7 @@
         "mazmat-panw"
     ],
     "dependencies": {
-        "Demisto REST API": {
+        "DemistoRESTAPI": {
             "mandatory": true,
             "display_name": "Demisto REST API"
         }
diff --git a/Packs/ForwardXSOARAuditLogsToSplunkHEC/pack_metadata.json b/Packs/ForwardXSOARAuditLogsToSplunkHEC/pack_metadata.json
index c7b9a12749cd..2520429db459 100644
--- a/Packs/ForwardXSOARAuditLogsToSplunkHEC/pack_metadata.json
+++ b/Packs/ForwardXSOARAuditLogsToSplunkHEC/pack_metadata.json
@@ -11,7 +11,7 @@
             "mandatory": true,
             "display_name": "SplunkPy"
         },
-        "Demisto REST API": {
+        "DemistoRESTAPI": {
             "mandatory": true,
             "display_name": "Demisto REST API"
         }
diff --git a/Packs/GoogleChronicleBackstory/pack_metadata.json b/Packs/GoogleChronicleBackstory/pack_metadata.json
index d865e09112bd..34c89a75f936 100644
--- a/Packs/GoogleChronicleBackstory/pack_metadata.json
+++ b/Packs/GoogleChronicleBackstory/pack_metadata.json
@@ -106,10 +106,6 @@
             "mandatory": false,
             "display_name": "ThreatConnect"
         },
-        "XFE": {
-            "mandatory": false,
-            "display_name": "IBM X-Force Exchange"
-        },
         "NIST": {
             "mandatory": true,
             "display_name": "NIST"
diff --git a/Packs/IntegrationsAndIncidentsHealthCheck/pack_metadata.json b/Packs/IntegrationsAndIncidentsHealthCheck/pack_metadata.json
index 524edb170544..156cb4b91957 100644
--- a/Packs/IntegrationsAndIncidentsHealthCheck/pack_metadata.json
+++ b/Packs/IntegrationsAndIncidentsHealthCheck/pack_metadata.json
@@ -42,7 +42,7 @@
             "mandatory": false,
             "display_name": "Mail Sender (New)"
         },
-        "Demisto REST API": {
+        "DemistoRESTAPI": {
             "mandatory": true,
             "display_name": "Demisto REST API"
         }
diff --git a/Packs/PANOSPolicyOptimizer/pack_metadata.json b/Packs/PANOSPolicyOptimizer/pack_metadata.json
index 62998092102b..1a5e88e0b882 100644
--- a/Packs/PANOSPolicyOptimizer/pack_metadata.json
+++ b/Packs/PANOSPolicyOptimizer/pack_metadata.json
@@ -14,10 +14,6 @@
     "useCases": [],
     "keywords": [],
     "dependencies": {
-        "Panorama": {
-            "mandatory": true,
-            "display_name": "PAN-OS"
-        },
         "CommonScripts": {
             "mandatory": true,
             "display_name": "Common Scripts"
diff --git a/Packs/PANOStoCDLMonitoring/pack_metadata.json b/Packs/PANOStoCDLMonitoring/pack_metadata.json
index 1208c441482a..e37bb04e7148 100644
--- a/Packs/PANOStoCDLMonitoring/pack_metadata.json
+++ b/Packs/PANOStoCDLMonitoring/pack_metadata.json
@@ -21,7 +21,7 @@
             "mandatory": true,
             "display_name": "PAN-OS"
         },
-        "Cortex Data Lake": {
+        "CortexDataLake": {
            "mandatory": true,
            "display_name": "Cortex Data Lake"
         }
diff --git a/Packs/PcapAnalysis/pack_metadata.json b/Packs/PcapAnalysis/pack_metadata.json
index 5da77900acd6..c44a8db42a12 100644
--- a/Packs/PcapAnalysis/pack_metadata.json
+++ b/Packs/PcapAnalysis/pack_metadata.json
@@ -89,10 +89,6 @@
             "mandatory": false,
             "display_name": "Flashpoint"
         },
-        "XFE": {
-            "mandatory": false,
-            "display_name": "IBM X-Force Exchange"
-        },
         "GoogleChronicleBackstory": {
             "mandatory": false,
             "display_name": "Chronicle"
diff --git a/Packs/PhishingURL/pack_metadata.json b/Packs/PhishingURL/pack_metadata.json
index 814897dad5fd..f61a50de9723 100644
--- a/Packs/PhishingURL/pack_metadata.json
+++ b/Packs/PhishingURL/pack_metadata.json
@@ -22,7 +22,7 @@
             "mandatory": true,
             "display_name": "Rasterize"
         },
-        "whois": {
+        "Whois": {
             "mandatory": true,
             "display_name": "WHOIS"
         }
diff --git a/Packs/PrismaCloud/pack_metadata.json b/Packs/PrismaCloud/pack_metadata.json
index dbb47d57da9e..2198fb331b0a 100644
--- a/Packs/PrismaCloud/pack_metadata.json
+++ b/Packs/PrismaCloud/pack_metadata.json
@@ -50,22 +50,6 @@
             "mandatory": false,
             "display_name": "AWS - IAM"
         },
-        "Azure-AKS": {
-            "mandatory": false,
-            "display_name": "Azure - AKS"
-        },
-        "Azure-SQL": {
-            "mandatory": false,
-            "display_name": "Azure - SQL"
-        },
-        "Azure-Network": {
-            "mandatory": false,
-            "display_name": "Azure - Network"
-        },
-        "Azure-Storage": {
-            "mandatory": false,
-            "display_name": "Azure - Storage"
-        },
         "GoogleKubernetesEngine": {
             "mandatory": false,
             "display_name": "Google Kubernetes Engine"
diff --git a/Packs/TIM_Processing/pack_metadata.json b/Packs/TIM_Processing/pack_metadata.json
index 40f9b0c7f865..0aeb4e3e9e76 100644
--- a/Packs/TIM_Processing/pack_metadata.json
+++ b/Packs/TIM_Processing/pack_metadata.json
@@ -82,10 +82,6 @@
             "mandatory": false,
             "display_name": "Palo Alto Networks WildFire"
         },
-        "XFE": {
-            "mandatory": false,
-            "display_name": "IBM X-Force Exchange"
-        },
         "ReversingLabs_Titanium_Cloud": {
             "mandatory": false,
             "display_name": "ReversingLabs Titanium Cloud"
diff --git a/Packs/Wiz/pack_metadata.json b/Packs/Wiz/pack_metadata.json
index 925bc1d0d13e..127c37fc9765 100644
--- a/Packs/Wiz/pack_metadata.json
+++ b/Packs/Wiz/pack_metadata.json
@@ -26,7 +26,7 @@
             "mandatory": true,
             "name": "Base"
         },
-        "Generic Webhook": {
+        "GenericWebhook": {
             "mandatory": true,
             "name": "Generic Webhook"
         }
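
Note on the metadata changes above: every renamed key follows the same rule. A key under "dependencies" must be the dependency pack's folder ID (DemistoRESTAPI, CortexDataLake, Whois, GenericWebhook), while the human-readable name belongs in "display_name"; a display name used as the key ("Demisto REST API", "Generic Webhook") can never match a pack in the index. A minimal lint sketch that would catch such mismatches; check_dependency_ids and the repository layout are illustrative assumptions, not code from this PR:

    import json
    from pathlib import Path

    def check_dependency_ids(content_root):
        """Report dependency keys that do not name a pack folder (assumed layout: <root>/Packs/<PackID>)."""
        packs_dir = Path(content_root) / "Packs"
        pack_ids = {path.name for path in packs_dir.iterdir() if path.is_dir()}
        problems = []
        for metadata_path in packs_dir.glob("*/pack_metadata.json"):
            metadata = json.loads(metadata_path.read_text())
            for dep_id in metadata.get("dependencies", {}):
                if dep_id not in pack_ids:
                    # e.g. "Demisto REST API" used where "DemistoRESTAPI" is expected
                    problems.append(f"{metadata_path.parent.name}: unknown dependency id '{dep_id}'")
        return problems

A check like this only flags key/ID mismatches; the entries deleted above are dependencies the packs no longer declare at all.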
""" return self._pack_name + def id(self): + """ str: pack root folder name. + """ + return self._pack_name + @property def path(self): """ str: pack folder full path. @@ -661,6 +666,8 @@ def _load_pack_dependencies_metadata(self, index_folder_path, packs_dict): Case 2: The dependency is missing from the index.zip since it is a new pack. In this case, handle missing dependency - This means we mark this pack as 'missing dependency', and once the new index.zip is created, and therefore it contains the new pack, we call this function again, and hitting case 1. + Case 3: The dependency is of a pack that is not a part of this marketplace. In this case, we ignore this + dependency. Args: index_folder_path (str): full path to download index folder. packs_dict (dict): dict of all packs relevant for current marketplace, as {pack_id: pack_object}. @@ -682,12 +689,16 @@ def _load_pack_dependencies_metadata(self, index_folder_path, packs_dict): with open(dependency_metadata_path, 'r') as metadata_file: dependency_metadata = json.load(metadata_file) dependencies_metadata_result[dependency_pack_id] = dependency_metadata - else: + elif dependency_pack_id in packs_dict: # Case 2: the dependency is not in the index since it is a new pack self._is_missing_dependencies = True logging.warning(f"{self._pack_name} pack dependency with id {dependency_pack_id} " f"was not found in index, marking it as missing dependencies - to be resolved in " f"next iteration over packs") + else: + # Case 3: the dependency is not a part of this marketplace + logging.warning(f"{self._pack_name} pack dependency with id {dependency_pack_id} " + f"is not part of this marketplace, ignoring this dependency") return dependencies_metadata_result, self._is_missing_dependencies @@ -2038,7 +2049,7 @@ def format_metadata(self, index_folder_path, packs_dependencies_mapping, build_n build_number (str): circleCI build number. commit_hash (str): current commit hash. statistics_handler (StatisticsHandler): The marketplace statistics handler - packs_dict (dict): dict of all packs relevant for current marketplace, as {pack_id: pack_object}. + packs_dict (dict): dict of all packs relevant for current marketplace, as {pack_name: pack_object}. marketplace (str): Marketplace of current upload. format_dependencies_only (bool): Indicates whether the metadata formation is just for formatting the dependencies or not. diff --git a/Tests/Marketplace/upload_packs.py b/Tests/Marketplace/upload_packs.py index 0bcfb2f500a4..e4f7021e116f 100644 --- a/Tests/Marketplace/upload_packs.py +++ b/Tests/Marketplace/upload_packs.py @@ -925,58 +925,48 @@ def upload_packs_with_dependencies_zip(storage_bucket, storage_base_path, signat storage_base_path (str): The upload destination in the target bucket for all packs (in the format of /content/Packs). storage_bucket (google.cloud.storage.bucket.Bucket): google cloud storage bucket. 
diff --git a/Tests/Marketplace/upload_packs.py b/Tests/Marketplace/upload_packs.py
index 0bcfb2f500a4..e4f7021e116f 100644
--- a/Tests/Marketplace/upload_packs.py
+++ b/Tests/Marketplace/upload_packs.py
@@ -925,58 +925,48 @@ def upload_packs_with_dependencies_zip(storage_bucket, storage_base_path, signat
         storage_base_path (str): The upload destination in the target bucket for all packs (in the format of
             /content/Packs).
         storage_bucket (google.cloud.storage.bucket.Bucket): google cloud storage bucket.
-        packs_for_current_marketplace_dict (dict): Dict of packs relevant for current marketplace as {pack_name: pack_object}
+        packs_for_current_marketplace_dict (dict): Dict of packs relevant for current marketplace as
+            {pack_name: pack_object}
     """
     logging.info("Starting to collect pack with dependencies zips")
     for pack_name, pack in packs_for_current_marketplace_dict.items():
-        if pack.status != PackStatus.SUCCESS.name and pack.status not in SKIPPED_STATUS_CODES:
-            # avoid trying to upload dependencies zip for failed packs
-            continue
         try:
-            logging.info(f"Collecting dependencies of {pack_name}")
-            pack_with_dep_path = os.path.join(pack.path, "with_dependencies")
-            zip_with_deps_path = os.path.join(pack.path, f"{pack_name}_with_dependencies.zip")
-            upload_path = os.path.join(storage_base_path, pack_name, f"{pack_name}_with_dependencies.zip")
-            Path(pack_with_dep_path).mkdir(parents=True, exist_ok=True)
-            if not (pack.zip_path and os.path.isfile(pack.zip_path)):
-                task_status = sign_and_zip_pack(pack, signature_key)
+            if pack.status not in [*SKIPPED_STATUS_CODES, PackStatus.SUCCESS.name]:
+                # avoid trying to upload dependencies zip for failed packs
+                continue
+            pack_and_its_dependencies = [packs_for_current_marketplace_dict.get(dep_name) for dep_name in
+                                         pack.all_levels_dependencies] + [pack]
+            pack_or_dependency_was_uploaded = any(dep_pack.status == PackStatus.SUCCESS.name for dep_pack in
+                                                  pack_and_its_dependencies)
+            if pack_or_dependency_was_uploaded:
+                pack_with_dep_path = os.path.join(pack.path, "with_dependencies")
+                zip_with_deps_path = os.path.join(pack.path, f"{pack_name}_with_dependencies.zip")
+                upload_path = os.path.join(storage_base_path, pack_name, f"{pack_name}_with_dependencies.zip")
+                Path(pack_with_dep_path).mkdir(parents=True, exist_ok=True)
+                for current_pack in pack_and_its_dependencies:
+                    logging.debug(f"Starting to collect zip of pack {current_pack.name}")
+                    # zip the pack and each of the pack's dependencies (or copy the zip if it already exists)
+                    if not (current_pack.zip_path and os.path.isfile(current_pack.zip_path)):
+                        # the zip does not exist yet, zip the current pack
+                        task_status = sign_and_zip_pack(current_pack, signature_key)
+                        if not task_status:
+                            # modify the pack's status to indicate the failure was in the dependencies zip step
+                            current_pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name
+                            logging.debug(f"Skipping uploading {pack.name} since failed zipping {current_pack.name}.")
+                            continue
+                    shutil.copy(current_pack.zip_path, os.path.join(pack_with_dep_path, current_pack.name + ".zip"))
+                logging.info(f"Zipping {pack_name} with its dependencies")
+                Pack.zip_folder_items(pack_with_dep_path, pack_with_dep_path, zip_with_deps_path)
+                shutil.rmtree(pack_with_dep_path)
+                logging.info(f"Uploading {pack_name} with its dependencies")
+                task_status, _, _ = pack.upload_to_storage(zip_with_deps_path, '', storage_bucket, True,
+                                                           storage_base_path, overridden_upload_path=upload_path)
+                logging.info(f"{pack_name} with dependencies was{' not' if not task_status else ''} "
+                             f"uploaded successfully")
                 if not task_status:
-                    # modify the pack's status to indicate the failure was in the dependencies zip step
-                    pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name
-                    logging.warning(f"Skipping dependencies collection for {pack_name}. Failed zipping")
-                    continue
-            shutil.copy(pack.zip_path, os.path.join(pack_with_dep_path, pack_name + ".zip"))
-            for dep_name in pack.all_levels_dependencies:
-                dep_pack = packs_for_current_marketplace_dict.get(dep_name)
-                if not (dep_pack.zip_path and os.path.isfile(dep_pack.zip_path)):
-                    task_status = sign_and_zip_pack(dep_pack, signature_key)
-                    if not task_status:
-                        # modify the pack's status to indicate the failure was in the dependencies zip step
-                        pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name
-                        logging.error(f"Skipping dependency {pack_name}. Failed zipping")
-                        continue
-                shutil.copy(dep_pack.zip_path, os.path.join(pack_with_dep_path, dep_name + '.zip'))
-            logging.info(f"Zipping {pack_name} with dependencies")
-            Pack.zip_folder_items(
-                pack_with_dep_path,
-                pack_with_dep_path,
-                zip_with_deps_path
-            )
-            shutil.rmtree(pack_with_dep_path)
-            logging.info(f"Uploading {pack_name} with dependencies")
-            task_status, _, _ = pack.upload_to_storage(
-                zip_pack_path=zip_with_deps_path,
-                latest_version='',
-                storage_bucket=storage_bucket,
-                override_pack=True,
-                storage_base_path=storage_base_path,
-                overridden_upload_path=upload_path
-            )
-            logging.info(f"{pack_name} with dependencies was{' not' if not task_status else ''} uploaded successfully")
-            if not task_status:
-                pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_UPLOADING.name
-                pack.cleanup()
+                    pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_UPLOADING.name
+                    pack.cleanup()
         except Exception as e:
             logging.error(traceback.format_exc())
             logging.error(f"Failed uploading packs with dependencies: {e}")
 
@@ -1071,7 +1061,7 @@ def main():
                                                  is_bucket_upload_flow, ci_branch)
 
     # detect packs to upload
-    pack_names = get_packs_names(target_packs, previous_commit_hash)
+    pack_names = get_packs_names(target_packs, previous_commit_hash)  # list of pack IDs
     extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
     packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                   if os.path.exists(os.path.join(extract_destination_path, pack_name))]
@@ -1208,7 +1198,7 @@
 
             pack.status = PackStatus.SUCCESS.name
 
-    logging.info(f"packs_with_missing_dependencies: {packs_with_missing_dependencies}")
+    logging.info(f"packs_with_missing_dependencies: {[pack.name for pack in packs_with_missing_dependencies]}")
 
     # Going over all packs that were marked as missing dependencies,
     # updating them with the new data for the new packs that were added to the index.zip
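
The rewritten loop also changes when a with-dependencies zip is rebuilt: only if the pack itself or at least one of its any-level dependencies was uploaded in this run. A hypothetical distillation of that gate (should_rebuild is not a function in this PR, and PackInfo with its status constants merely stands in for the real Pack, PackStatus, and SKIPPED_STATUS_CODES):

    from dataclasses import dataclass, field

    UPLOADED = "SUCCESS"                        # stands in for PackStatus.SUCCESS.name
    SKIPPED_STATUSES = {"PACK_ALREADY_EXISTS"}  # stands in for SKIPPED_STATUS_CODES

    @dataclass
    class PackInfo:
        status: str
        all_levels_dependencies: list = field(default_factory=list)

    def should_rebuild(pack, packs_by_name):
        """Rebuild <pack>_with_dependencies.zip only if something in the bundle changed."""
        if pack.status != UPLOADED and pack.status not in SKIPPED_STATUSES:
            return False  # the pack failed earlier in the flow, nothing to bundle
        bundle = [packs_by_name[name] for name in pack.all_levels_dependencies] + [pack]
        # any freshly uploaded member invalidates the previously published bundle
        return any(member.status == UPLOADED for member in bundle)

    packs = {"Base": PackInfo(UPLOADED), "Wiz": PackInfo("PACK_ALREADY_EXISTS", ["Base"])}
    assert should_rebuild(packs["Wiz"], packs)  # unchanged pack, but its dependency changed

Skipped packs deliberately pass the first check: an unchanged pack still needs a fresh bundle when one of its dependencies was re-uploaded.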
Failed zipping") - continue - shutil.copy(pack.zip_path, os.path.join(pack_with_dep_path, pack_name + ".zip")) - for dep_name in pack.all_levels_dependencies: - dep_pack = packs_for_current_marketplace_dict.get(dep_name) - if not (dep_pack.zip_path and os.path.isfile(dep_pack.zip_path)): - task_status = sign_and_zip_pack(dep_pack, signature_key) - if not task_status: - # modify the pack's status to indicate the failure was in the dependencies zip step - pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name - logging.error(f"Skipping dependency {pack_name}. Failed zipping") - continue - shutil.copy(dep_pack.zip_path, os.path.join(pack_with_dep_path, dep_name + '.zip')) - logging.info(f"Zipping {pack_name} with dependencies") - Pack.zip_folder_items( - pack_with_dep_path, - pack_with_dep_path, - zip_with_deps_path - ) - shutil.rmtree(pack_with_dep_path) - logging.info(f"Uploading {pack_name} with dependencies") - task_status, _, _ = pack.upload_to_storage( - zip_pack_path=zip_with_deps_path, - latest_version='', - storage_bucket=storage_bucket, - override_pack=True, - storage_base_path=storage_base_path, - overridden_upload_path=upload_path - ) - logging.info(f"{pack_name} with dependencies was{' not' if not task_status else ''} uploaded successfully") - if not task_status: - pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_UPLOADING.name - pack.cleanup() + pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_UPLOADING.name + pack.cleanup() except Exception as e: logging.error(traceback.format_exc()) logging.error(f"Failed uploading packs with dependencies: {e}") @@ -1071,7 +1061,7 @@ def main(): is_bucket_upload_flow, ci_branch) # detect packs to upload - pack_names = get_packs_names(target_packs, previous_commit_hash) + pack_names = get_packs_names(target_packs, previous_commit_hash) # list of the pack's ids extract_packs_artifacts(packs_artifacts_path, extract_destination_path) packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names if os.path.exists(os.path.join(extract_destination_path, pack_name))] @@ -1208,7 +1198,7 @@ def main(): pack.status = PackStatus.SUCCESS.name - logging.info(f"packs_with_missing_dependencies: {packs_with_missing_dependencies}") + logging.info(f"packs_with_missing_dependencies: {[pack.name for pack in packs_with_missing_dependencies]}") # Going over all packs that were marked as missing dependencies, # updating them with the new data for the new packs that were added to the index.zip diff --git a/Utils/trigger_test_upload_flow.sh b/Utils/trigger_test_upload_flow.sh index 3702f4ce4ed4..f754ae2de287 100755 --- a/Utils/trigger_test_upload_flow.sh +++ b/Utils/trigger_test_upload_flow.sh @@ -102,17 +102,17 @@ if [ -n "$_force" ] && [ -n "$_storage_base_path"]; then echo "Can not force upload while using a storage base path." exit 1 fi -#if [[ -n "$_storage_base_path" ]] && [ "$_storage_base_path" != *content ]; then -# echo "$_storage_base_path" -# echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content." -# exit 1 -#fi -# -#if [[ -n "$_storage_base_path" ]] && [ "$_storage_base_path" != upload-flow* ]; then -# echo $_storage_base_path -# echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content." 
-#  exit 1
-#fi
+if [ -n "$_storage_base_path" ] && [[ "$_storage_base_path" != *content ]]; then
+  echo "$_storage_base_path"
+  echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content."
+  exit 1
+fi
+
+if [ -n "$_storage_base_path" ] && [[ "$_storage_base_path" != upload-flow* ]]; then
+  echo "$_storage_base_path"
+  echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content."
+  exit 1
+fi
 
 if [ -n "$_gitlab" ]; then