
Commit

fix notebooks
Adam Janovsky committed Jan 22, 2025
1 parent 6b0a27c commit 83017b2
Showing 5 changed files with 22 additions and 27 deletions.
5 changes: 3 additions & 2 deletions notebooks/cc/cpe_eval.ipynb
@@ -18,7 +18,8 @@
"from sec_certs.dataset import CCDataset\n",
"import pandas as pd\n",
"import json\n",
"import tempfile"
"import tempfile\n",
"from sec_certs.utils.label_studio_utils import to_label_studio_json"
]
},
{
@@ -58,7 +59,7 @@
"with tempfile.TemporaryDirectory() as tmp_dir:\n",
" dset.root_dir = tmp_dir\n",
" dset.certs = {x.dgst: x for x in dset if x.dgst in eval_certs.index.tolist()}\n",
" dset.to_label_studio_json(\"./label_studio_input_data.json\", update_json=False)"
" to_label_studio_json(dset, \"./label_studio_input_data.json\")"
]
},
{
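For reference, the hunks above swap the removed dset.to_label_studio_json(...) method for the standalone to_label_studio_json helper. A minimal sketch of the new call pattern, assuming a dataset loaded via from_web_latest as elsewhere in this commit (cpe_eval.ipynb actually restricts its dataset to the evaluated certificates first):

from sec_certs.dataset import CCDataset
from sec_certs.utils.label_studio_utils import to_label_studio_json
import tempfile

# Illustrative load; the notebook builds its dataset from an evaluation snapshot instead.
dset = CCDataset.from_web_latest(path="dset", auxiliary_datasets=True)

with tempfile.TemporaryDirectory() as tmp_dir:
    dset.root_dir = tmp_dir
    # The helper takes the dataset as its first argument, replacing the old method call.
    to_label_studio_json(dset, "./label_studio_input_data.json")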
7 changes: 4 additions & 3 deletions notebooks/cc/scheme_eval.ipynb
@@ -24,7 +24,8 @@
"from sec_certs.model import CCSchemeMatcher\n",
"from sec_certs.sample.cc_certificate_id import canonicalize\n",
"from sec_certs.sample.cc_scheme import CCScheme, EntryType\n",
"from sec_certs.configuration import config"
"from sec_certs.configuration import config\n",
"from sec_certs.dataset.auxiliary_dataset_handling import CCSchemeDatasetHandler"
]
},
{
@@ -56,7 +57,7 @@
"metadata": {},
"outputs": [],
"source": [
"dset.auxiliary_datasets.scheme_dset = schemes\n",
"dset.aux_handlers[CCSchemeDatasetHandler].dset = schemes\n",
"\n",
"count_was = 0\n",
"count_is = 0\n",
@@ -161,7 +162,7 @@
" rate = len(assigned)/len(total) * 100 if len(total) != 0 else 0\n",
" rate_list = rates.setdefault(country, [])\n",
" rate_list.append(rate)\n",
" \n",
"\n",
" print(f\"{country}: {len(assigned)} assigned out of {len(total)} -> {rate:.1f}%\")\n",
" total_active = total[total[\"status\"] == \"active\"]\n",
" assigned_active = assigned[assigned[\"status\"] == \"active\"]\n",
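In scheme_eval.ipynb the scheme data is now attached through the aux_handlers registry (keyed by handler class) instead of the removed auxiliary_datasets attribute. A minimal sketch of the new assignment, with schemes as a stand-in for the mapping the notebook assembles earlier:

from sec_certs.dataset import CCDataset
from sec_certs.dataset.auxiliary_dataset_handling import CCSchemeDatasetHandler

dset = CCDataset.from_web_latest(path="dset", auxiliary_datasets=True)

# Stand-in; the notebook builds this mapping from the CC scheme pages before assigning it.
schemes = {}

# Auxiliary datasets are reached through the handler registry, keyed by handler class.
dset.aux_handlers[CCSchemeDatasetHandler].dset = schemes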
13 changes: 5 additions & 8 deletions notebooks/cc/vulnerabilities.ipynb
@@ -33,6 +33,7 @@
"import warnings\n",
"from pathlib import Path\n",
"import tempfile\n",
"from sec_certs.dataset.auxiliary_dataset_handling import CVEDatasetHandler, CPEDatasetHandler, CCMaintenanceUpdateDatasetHandler\n",
"from sec_certs.dataset import CCDataset, CCDatasetMaintenanceUpdates, CVEDataset, CPEDataset\n",
"from sec_certs.utils.pandas import (\n",
" compute_cve_correlations,\n",
@@ -82,15 +83,11 @@
"\n",
"# # Remote instantiation (takes approx. 10 minutes to complete)\n",
"# dset: CCDataset = CCDataset.from_web_latest(path=\"dset\", auxiliary_datasets=True)\n",
"# dset.load_auxiliary_datasets()\n",
"\n",
"# print(\"Downloading dataset of maintenance updates\")\n",
"# main_dset: CCDatasetMaintenanceUpdates = CCDatasetMaintenanceUpdates.from_web_latest()\n",
"\n",
"# print(\"Downloading CPE dataset\")\n",
"# cpe_dset: CPEDataset = dset.auxiliary_datasets.cpe_dset\n",
"\n",
"# print(\"Downloading CVE dataset\")\n",
"# cve_dset: CVEDataset = dset.auxiliary_datasets.cve_dset"
"# main_dset: CCDatasetMaintenanceUpdates = dset.aux_handlers[CCMaintenanceUpdateDatasetHandler].dset\n",
"# cpe_dset: CPEDataset = dset.aux_handlers[CPEDatasetHandler].dset\n",
"# cve_dset: CVEDataset = dset.aux_handlers[CVEDatasetHandler].dset"
]
},
{
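Both vulnerability notebooks move from the auxiliary_datasets attribute to an explicit load followed by handler lookups; the FIPS notebook below follows the same pattern with FIPSDataset. A minimal sketch of the (commented-out) remote path shown in the hunk above:

from sec_certs.dataset import CCDataset, CCDatasetMaintenanceUpdates, CPEDataset, CVEDataset
from sec_certs.dataset.auxiliary_dataset_handling import (
    CCMaintenanceUpdateDatasetHandler,
    CPEDatasetHandler,
    CVEDatasetHandler,
)

# Remote instantiation, as noted in the notebook (takes roughly 10 minutes).
dset: CCDataset = CCDataset.from_web_latest(path="dset", auxiliary_datasets=True)
dset.load_auxiliary_datasets()

# Each auxiliary dataset is read from its handler rather than the old auxiliary_datasets attribute.
main_dset: CCDatasetMaintenanceUpdates = dset.aux_handlers[CCMaintenanceUpdateDatasetHandler].dset
cpe_dset: CPEDataset = dset.aux_handlers[CPEDatasetHandler].dset
cve_dset: CVEDataset = dset.aux_handlers[CVEDatasetHandler].dset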
22 changes: 9 additions & 13 deletions notebooks/fips/vulnerabilities.ipynb
@@ -1,10 +1,12 @@
{
"cells": [
{
"metadata": {},
"cell_type": "markdown",
"source": "# Vulnerability analysis",
"id": "3a0981d008383c12"
"id": "3a0981d008383c12",
"metadata": {},
"source": [
"# Vulnerability analysis"
]
},
{
"cell_type": "code",
@@ -21,6 +23,7 @@
"from sec_certs.dataset.fips import FIPSDataset\n",
"from sec_certs.dataset.cpe import CPEDataset\n",
"from sec_certs.dataset.cve import CVEDataset\n",
"from sec_certs.dataset.auxiliary_dataset_handling import CPEDatasetHandler, CVEDatasetHandler\n",
"from sec_certs.utils.pandas import expand_df_with_cve_cols\n",
"import pandas as pd\n",
"import seaborn as sns\n",
@@ -47,8 +50,9 @@
"metadata": {},
"outputs": [],
"source": [
"cve_dset: CVEDataset = dset.auxiliary_datasets.cve_dset\n",
"cpe_dset: CPEDataset = dset.auxiliary_datasets.cpe_dset"
"dset.load_auxiliary_datasets()\n",
"cve_dset: CVEDataset = dset.aux_handlers[CVEDatasetHandler].dset\n",
"cpe_dset: CPEDataset = dset.aux_handlers[CPEDatasetHandler].dset"
]
},
{
@@ -181,14 +185,6 @@
"g = sns.relplot(data=df_cve_rich, x=\"level\", y=\"avg_cve_score\")\n",
"plt.show()"
]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"id": "6c3c2ec4-3fab-48ad-aacb-6f54277abe66",
-"metadata": {},
-"outputs": [],
-"source": []
}
],
"metadata": {
2 changes: 1 addition & 1 deletion src/sec_certs/dataset/cc.py
@@ -845,7 +845,7 @@ def _compute_heuristics_body(self, skip_schemes: bool = False) -> None:
compute_transitive_vulnerabilities(self.certs)

if not skip_schemes:
-            compute_scheme_data(self.aux_handlers[CCSchemeDatasetHandler].dset, self.certs.values())
+            compute_scheme_data(self.aux_handlers[CCSchemeDatasetHandler].dset, self.certs)

compute_cert_labs(self.certs.values())
compute_sars(self.certs.values())
