Missing dependencies fix (demisto#19052)
* ignore dependencies of packs from other MP

* test pack

* fix dependencies

* fix random dependencies

* debug logs

* change

* test change

* remove failing pack

* d

* test change

* remove testing

* remove testing files

* add removed pack

* add removed pack

* lint
Noy-Maimon authored May 18, 2022
1 parent d2800ee commit 329050f
Showing 15 changed files with 69 additions and 104 deletions.
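Most of the pack_metadata.json changes below either rename a dependency key from the pack's display name to its pack ID (the folder name under Packs/) or remove dependency entries, while the Python and shell changes adjust the upload flow itself. As a rough illustration of the metadata fix, a hypothetical helper (not part of this commit) that flags dependency keys which do not match any pack folder might look like this:

import json
import os


def find_mismatched_dependency_ids(pack_metadata_path: str, packs_root: str) -> list:
    """Return dependency keys in a pack_metadata.json that are not existing pack IDs (folder names)."""
    with open(pack_metadata_path, 'r') as f:
        metadata = json.load(f)
    pack_ids = set(os.listdir(packs_root))  # a pack's ID is its folder name under Packs/
    return [dep_id for dep_id in metadata.get('dependencies', {}) if dep_id not in pack_ids]


# Example usage (paths are illustrative): before this commit, checking Packs/Wiz/pack_metadata.json
# against the Packs/ directory would report the display-name key 'Generic Webhook'.
print(find_mismatched_dependency_ids('Packs/Wiz/pack_metadata.json', 'Packs'))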
Packs/CommonPlaybooks/pack_metadata.json (4 changes: 0 additions & 4 deletions)
@@ -32,10 +32,6 @@
             "mandatory": false,
             "display_name": "McAfee ePO"
         },
-        "McAfee ePO v2": {
-            "mandatory": false,
-            "display_name": "McAfee ePO v2"
-        },
         "CheckpointFirewall": {
             "mandatory": false,
             "display_name": "Check Point Firewall"
Packs/CreatePlbkDoc/pack_metadata.json (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@
         "mazmat-panw"
     ],
     "dependencies": {
-        "Demisto REST API": {
+        "DemistoRESTAPI": {
             "mandatory": true,
             "display_name": "Demisto REST API"
         }
Packs/ForwardXSOARAuditLogsToSplunkHEC/pack_metadata.json (2 changes: 1 addition & 1 deletion)
@@ -11,7 +11,7 @@
             "mandatory": true,
             "display_name": "SplunkPy"
         },
-        "Demisto REST API": {
+        "DemistoRESTAPI": {
             "mandatory": true,
             "display_name": "Demisto REST API"
         }
Packs/GoogleChronicleBackstory/pack_metadata.json (4 changes: 0 additions & 4 deletions)
@@ -106,10 +106,6 @@
             "mandatory": false,
             "display_name": "ThreatConnect"
         },
-        "XFE": {
-            "mandatory": false,
-            "display_name": "IBM X-Force Exchange"
-        },
         "NIST": {
             "mandatory": true,
             "display_name": "NIST"
pack_metadata.json (file name not captured)
@@ -42,7 +42,7 @@
             "mandatory": false,
             "display_name": "Mail Sender (New)"
         },
-        "Demisto REST API": {
+        "DemistoRESTAPI": {
             "mandatory": true,
             "display_name": "Demisto REST API"
         }
Packs/PANOSPolicyOptimizer/pack_metadata.json (4 changes: 0 additions & 4 deletions)
@@ -14,10 +14,6 @@
     "useCases": [],
     "keywords": [],
     "dependencies": {
-        "Panorama": {
-            "mandatory": true,
-            "display_name": "PAN-OS"
-        },
         "CommonScripts": {
             "mandatory": true,
             "display_name": "Common Scripts"
Packs/PANOStoCDLMonitoring/pack_metadata.json (2 changes: 1 addition & 1 deletion)
@@ -21,7 +21,7 @@
             "mandatory": true,
             "display_name": "PAN-OS"
         },
-        "Cortex Data Lake": {
+        "CortexDataLake": {
             "mandatory": true,
             "display_name": "Cortex Data Lake"
         }
Packs/PcapAnalysis/pack_metadata.json (4 changes: 0 additions & 4 deletions)
@@ -89,10 +89,6 @@
             "mandatory": false,
             "display_name": "Flashpoint"
         },
-        "XFE": {
-            "mandatory": false,
-            "display_name": "IBM X-Force Exchange"
-        },
         "GoogleChronicleBackstory": {
             "mandatory": false,
             "display_name": "Chronicle"
Packs/PhishingURL/pack_metadata.json (2 changes: 1 addition & 1 deletion)
@@ -22,7 +22,7 @@
             "mandatory": true,
             "display_name": "Rasterize"
         },
-        "whois": {
+        "Whois": {
             "mandatory": true,
             "display_name": "WHOIS"
         }
Packs/PrismaCloud/pack_metadata.json (16 changes: 0 additions & 16 deletions)
@@ -50,22 +50,6 @@
             "mandatory": false,
             "display_name": "AWS - IAM"
         },
-        "Azure-AKS": {
-            "mandatory": false,
-            "display_name": "Azure - AKS"
-        },
-        "Azure-SQL": {
-            "mandatory": false,
-            "display_name": "Azure - SQL"
-        },
-        "Azure-Network": {
-            "mandatory": false,
-            "display_name": "Azure - Network"
-        },
-        "Azure-Storage": {
-            "mandatory": false,
-            "display_name": "Azure - Storage"
-        },
         "GoogleKubernetesEngine": {
             "mandatory": false,
             "display_name": "Google Kubernetes Engine"
Packs/TIM_Processing/pack_metadata.json (4 changes: 0 additions & 4 deletions)
@@ -82,10 +82,6 @@
             "mandatory": false,
             "display_name": "Palo Alto Networks WildFire"
         },
-        "XFE": {
-            "mandatory": false,
-            "display_name": "IBM X-Force Exchange"
-        },
         "ReversingLabs_Titanium_Cloud": {
             "mandatory": false,
             "display_name": "ReversingLabs Titanium Cloud"
Packs/Wiz/pack_metadata.json (2 changes: 1 addition & 1 deletion)
@@ -26,7 +26,7 @@
             "mandatory": true,
             "name": "Base"
         },
-        "Generic Webhook": {
+        "GenericWebhook": {
             "mandatory": true,
             "name": "Generic Webhook"
         }
Tests/Marketplace/marketplace_services.py (17 changes: 14 additions & 3 deletions)
@@ -119,10 +119,15 @@ def __init__(self, pack_name, pack_path):

     @property
     def name(self):
-        """ str: pack root folder name.
+        """ str: pack name.
         """
         return self._pack_name
 
+    def id(self):
+        """ str: pack root folder name.
+        """
+        return self._pack_name
+
     @property
     def path(self):
         """ str: pack folder full path.
@@ -661,6 +666,8 @@ def _load_pack_dependencies_metadata(self, index_folder_path, packs_dict):
             Case 2: The dependency is missing from the index.zip since it is a new pack. In this case, handle missing
                 dependency - This means we mark this pack as 'missing dependency', and once the new index.zip is
                 created, and therefore it contains the new pack, we call this function again, and hitting case 1.
+            Case 3: The dependency is of a pack that is not a part of this marketplace. In this case, we ignore this
+                dependency.
         Args:
             index_folder_path (str): full path to download index folder.
             packs_dict (dict): dict of all packs relevant for current marketplace, as {pack_id: pack_object}.
@@ -682,12 +689,16 @@ def _load_pack_dependencies_metadata(self, index_folder_path, packs_dict):
                 with open(dependency_metadata_path, 'r') as metadata_file:
                     dependency_metadata = json.load(metadata_file)
                     dependencies_metadata_result[dependency_pack_id] = dependency_metadata
-            else:
+            elif dependency_pack_id in packs_dict:
                 # Case 2: the dependency is not in the index since it is a new pack
                 self._is_missing_dependencies = True
                 logging.warning(f"{self._pack_name} pack dependency with id {dependency_pack_id} "
                                 f"was not found in index, marking it as missing dependencies - to be resolved in "
                                 f"next iteration over packs")
+            else:
+                # Case 3: the dependency is not a part of this marketplace
+                logging.warning(f"{self._pack_name} pack dependency with id {dependency_pack_id} "
+                                f"is not part of this marketplace, ignoring this dependency")

         return dependencies_metadata_result, self._is_missing_dependencies
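For readability, a condensed stand-alone sketch of the three cases described in the docstring above; this is illustrative only, the real logic is a method on the Pack class, and the metadata file name inside the index ('metadata.json') is an assumption here:

import json
import os
from typing import Tuple


def load_dependencies_metadata(first_level_dependencies: list, index_folder_path: str,
                               packs_dict: dict) -> Tuple[dict, bool]:
    """Illustrative version of Pack._load_pack_dependencies_metadata with simplified state handling."""
    dependencies_metadata = {}
    is_missing_dependencies = False
    for dependency_pack_id in first_level_dependencies:
        metadata_path = os.path.join(index_folder_path, dependency_pack_id, 'metadata.json')
        if os.path.exists(metadata_path):
            # Case 1: the dependency already exists in the downloaded index.zip
            with open(metadata_path, 'r') as metadata_file:
                dependencies_metadata[dependency_pack_id] = json.load(metadata_file)
        elif dependency_pack_id in packs_dict:
            # Case 2: the dependency is a new pack that is not yet in the index -
            # mark it and resolve it again after the new index.zip is built
            is_missing_dependencies = True
        else:
            # Case 3: the dependency belongs to another marketplace - ignore it
            continue
    return dependencies_metadata, is_missing_dependencies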

@@ -2038,7 +2049,7 @@ def format_metadata(self, index_folder_path, packs_dependencies_mapping, build_n
             build_number (str): circleCI build number.
             commit_hash (str): current commit hash.
             statistics_handler (StatisticsHandler): The marketplace statistics handler
-            packs_dict (dict): dict of all packs relevant for current marketplace, as {pack_id: pack_object}.
+            packs_dict (dict): dict of all packs relevant for current marketplace, as {pack_name: pack_object}.
             marketplace (str): Marketplace of current upload.
             format_dependencies_only (bool): Indicates whether the metadata formation is just for formatting the
                 dependencies or not.
Tests/Marketplace/upload_packs.py (86 changes: 38 additions & 48 deletions)
@@ -925,58 +925,48 @@ def upload_packs_with_dependencies_zip(storage_bucket, storage_base_path, signat
storage_base_path (str): The upload destination in the target bucket for all packs (in the format of
<some_path_in_the_target_bucket>/content/Packs).
storage_bucket (google.cloud.storage.bucket.Bucket): google cloud storage bucket.
packs_for_current_marketplace_dict (dict): Dict of packs relevant for current marketplace as {pack_name: pack_object}
packs_for_current_marketplace_dict (dict): Dict of packs relevant for current marketplace as
{pack_name: pack_object}
"""
logging.info("Starting to collect pack with dependencies zips")
for pack_name, pack in packs_for_current_marketplace_dict.items():
if pack.status != PackStatus.SUCCESS.name and pack.status not in SKIPPED_STATUS_CODES:
# avoid trying to upload dependencies zip for failed packs
continue
try:
logging.info(f"Collecting dependencies of {pack_name}")
pack_with_dep_path = os.path.join(pack.path, "with_dependencies")
zip_with_deps_path = os.path.join(pack.path, f"{pack_name}_with_dependencies.zip")
upload_path = os.path.join(storage_base_path, pack_name, f"{pack_name}_with_dependencies.zip")
Path(pack_with_dep_path).mkdir(parents=True, exist_ok=True)
if not (pack.zip_path and os.path.isfile(pack.zip_path)):
task_status = sign_and_zip_pack(pack, signature_key)
if pack.status not in [*SKIPPED_STATUS_CODES, PackStatus.SUCCESS.name]:
# avoid trying to upload dependencies zip for failed packs
continue
pack_and_its_dependencies = [packs_for_current_marketplace_dict.get(dep_name) for dep_name in
pack.all_levels_dependencies] + [pack]
pack_or_dependency_was_uploaded = any(dep_pack.status == PackStatus.SUCCESS.name for dep_pack in
pack_and_its_dependencies)
if pack_or_dependency_was_uploaded:
pack_with_dep_path = os.path.join(pack.path, "with_dependencies")
zip_with_deps_path = os.path.join(pack.path, f"{pack_name}_with_dependencies.zip")
upload_path = os.path.join(storage_base_path, pack_name, f"{pack_name}_with_dependencies.zip")
Path(pack_with_dep_path).mkdir(parents=True, exist_ok=True)
for current_pack in pack_and_its_dependencies:
logging.debug(f"Starting to collect zip of pack {current_pack.name}")
# zip the pack and each of the pack's dependencies (or copy existing zip if was already zipped)
if not (current_pack.zip_path and os.path.isfile(current_pack.zip_path)):
# the zip does not exist yet, zip the current pack
task_status = sign_and_zip_pack(current_pack, signature_key)
if not task_status:
# modify the pack's status to indicate the failure was in the dependencies zip step
current_pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name
logging.debug(f"Skipping uploading {pack.name} since failed zipping {current_pack.name}.")
continue
shutil.copy(current_pack.zip_path, os.path.join(pack_with_dep_path, current_pack.name + ".zip"))
logging.info(f"Zipping {pack_name} with its dependencies")
Pack.zip_folder_items(pack_with_dep_path, pack_with_dep_path, zip_with_deps_path)
shutil.rmtree(pack_with_dep_path)
logging.info(f"Uploading {pack_name} with its dependencies")
task_status, _, _ = pack.upload_to_storage(zip_with_deps_path, '', storage_bucket, True,
storage_base_path, overridden_upload_path=upload_path)
logging.info(f"{pack_name} with dependencies was{' not' if not task_status else ''} "
f"uploaded successfully")
if not task_status:
# modify the pack's status to indicate the failure was in the dependencies zip step
pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name
logging.warning(f"Skipping dependencies collection for {pack_name}. Failed zipping")
continue
shutil.copy(pack.zip_path, os.path.join(pack_with_dep_path, pack_name + ".zip"))
for dep_name in pack.all_levels_dependencies:
dep_pack = packs_for_current_marketplace_dict.get(dep_name)
if not (dep_pack.zip_path and os.path.isfile(dep_pack.zip_path)):
task_status = sign_and_zip_pack(dep_pack, signature_key)
if not task_status:
# modify the pack's status to indicate the failure was in the dependencies zip step
pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_SIGNING.name
logging.error(f"Skipping dependency {pack_name}. Failed zipping")
continue
shutil.copy(dep_pack.zip_path, os.path.join(pack_with_dep_path, dep_name + '.zip'))
logging.info(f"Zipping {pack_name} with dependencies")
Pack.zip_folder_items(
pack_with_dep_path,
pack_with_dep_path,
zip_with_deps_path
)
shutil.rmtree(pack_with_dep_path)
logging.info(f"Uploading {pack_name} with dependencies")
task_status, _, _ = pack.upload_to_storage(
zip_pack_path=zip_with_deps_path,
latest_version='',
storage_bucket=storage_bucket,
override_pack=True,
storage_base_path=storage_base_path,
overridden_upload_path=upload_path
)
logging.info(f"{pack_name} with dependencies was{' not' if not task_status else ''} uploaded successfully")
if not task_status:
pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_UPLOADING.name
pack.cleanup()
pack.status = PackStatus.FAILED_CREATING_DEPENDENCIES_ZIP_UPLOADING.name
pack.cleanup()
except Exception as e:
logging.error(traceback.format_exc())
logging.error(f"Failed uploading packs with dependencies: {e}")
@@ -1071,7 +1061,7 @@ def main():
                                           is_bucket_upload_flow, ci_branch)
 
     # detect packs to upload
-    pack_names = get_packs_names(target_packs, previous_commit_hash)
+    pack_names = get_packs_names(target_packs, previous_commit_hash)  # list of the pack's ids
     extract_packs_artifacts(packs_artifacts_path, extract_destination_path)
     packs_list = [Pack(pack_name, os.path.join(extract_destination_path, pack_name)) for pack_name in pack_names
                   if os.path.exists(os.path.join(extract_destination_path, pack_name))]
@@ -1208,7 +1198,7 @@ def main():
 
             pack.status = PackStatus.SUCCESS.name
 
-    logging.info(f"packs_with_missing_dependencies: {packs_with_missing_dependencies}")
+    logging.info(f"packs_with_missing_dependencies: {[pack.name for pack in packs_with_missing_dependencies]}")
 
     # Going over all packs that were marked as missing dependencies,
     # updating them with the new data for the new packs that were added to the index.zip
Utils/trigger_test_upload_flow.sh (22 changes: 11 additions & 11 deletions)
@@ -102,17 +102,17 @@ if [ -n "$_force" ] && [ -n "$_storage_base_path"]; then
   echo "Can not force upload while using a storage base path."
   exit 1
 fi
-#if [[ -n "$_storage_base_path" ]] && [ "$_storage_base_path" != *content ]; then
-#  echo "$_storage_base_path"
-#  echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content."
-#  exit 1
-#fi
-#
-#if [[ -n "$_storage_base_path" ]] && [ "$_storage_base_path" != upload-flow* ]; then
-#  echo $_storage_base_path
-#  echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content."
-#  exit 1
-#fi
+if [ -n "$_storage_base_path" ] && [[ "$_storage_base_path" != *content ]]; then
+  echo "$_storage_base_path"
+  echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content.1"
+  exit 1
+fi
+
+if [ -n "$_storage_base_path" ] && [[ "$_storage_base_path" != upload-flow* ]]; then
+  echo $_storage_base_path
+  echo "The given storage base path should look like upload-flow/builds/branch_name/build_number/content.2"
+  exit 1
+fi
 
 if [ -n "$_gitlab" ]; then
