Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

v1.2.0 #109

Open
wants to merge 21 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 45 additions & 0 deletions .github/workflows/test_PRs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# This workflow is designed to run through the process of installing, building, and executing
# basic PMapper unittests against PMapper's supported versions when there's a new PR aiming
# at the "master" branch

name: "Test Against Pythons"

# Run on PRs targeting master, or manually from the Actions tab (workflow_dispatch).
on:
  pull_request:
    branches: [ master ]
  workflow_dispatch:
# Least-privilege grants for the job's GITHUB_TOKEN.
permissions:
  actions: read
  issues: write
  contents: read
  discussions: write

jobs:
  build_and_test:
    runs-on: ${{ matrix.os }}
    strategy:
      # Keep running the other OS/Python combinations even when one fails.
      fail-fast: false
      matrix:
        os: ["ubuntu-latest", "windows-latest", "macos-latest"]
        # Oldest and newest interpreters the project aims to support.
        # NOTE(review): Python "3.6" is no longer provided by actions/setup-python on
        # current ubuntu-latest / macos-latest runner images — confirm this matrix resolves.
        python-version: ["3.6", "3.10"]
    steps:
      - name: "Grab Code"
        # NOTE(review): actions/checkout@v2 targets a deprecated Node.js runtime; consider @v4.
        uses: actions/checkout@v2

      - name: "Install Python"
        # NOTE(review): actions/setup-python@v2 is likewise deprecated; consider @v4/@v5.
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: "Install PMapper"
        shell: bash
        working-directory: ${{ github.workspace }}
        # Install from the checked-out source, then print the package record as a sanity check.
        run: |
          pip install .
          pip show principalmapper

      - name: "Run Test Cases"
        shell: bash
        working-directory: ${{ github.workspace }}
        # bash expands tests/test* into the individual unittest files.
        run: |
          python -m unittest -v tests/test*
2 changes: 1 addition & 1 deletion examples/graph_from_cf_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def main():
edges = iam_edges.generate_edges_locally(nodes) + sts_edges.generate_edges_locally(nodes)

# Create our graph and finish
graph = Graph(nodes, edges, policies, groups, metadata)
graph = Graph(nodes, edges, policies, groups, '000000000000', 'aws', metadata)
graph_actions.print_graph_data(graph)


Expand Down
2 changes: 1 addition & 1 deletion principalmapper/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,4 +15,4 @@
# You should have received a copy of the GNU Affero General Public License
# along with Principal Mapper. If not, see <https://www.gnu.org/licenses/>.

__version__ = '1.1.5'
__version__ = '1.2.0'
6 changes: 6 additions & 0 deletions principalmapper/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import logging
import sys

import principalmapper
from principalmapper.analysis import cli as analysis_cli
from principalmapper.graphing import graph_cli
from principalmapper.graphing import orgs_cli
Expand Down Expand Up @@ -49,6 +50,11 @@ def main() -> int:
action='store_true',
help='Produces debug-level output of the underlying Principal Mapper library during execution.'
)
argument_parser.add_argument(
'--version',
action='version',
version=f'Principal Mapper v{principalmapper.__version__}'
)

# Create subparser for various subcommands
subparser = argument_parser.add_subparsers(
Expand Down
10 changes: 7 additions & 3 deletions principalmapper/analysis/find_risks.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ def gen_report(graph: Graph) -> Report:
"""Generates a Report object with findings and metadata about report-generation"""
findings = gen_all_findings(graph)
return Report(
graph.metadata['account_id'],
graph.account,
graph.partition,
dt.datetime.now(dt.timezone.utc),
findings,
'Findings identified using Principal Mapper ({}) from NCC Group: https://github.com/nccgroup/PMapper'.format(
Expand Down Expand Up @@ -475,7 +476,7 @@ def gen_resources_with_potential_confused_deputies(graph: Graph) -> List[Finding
for action in action_list:
rpa_result = resource_policy_authorization(
service,
graph.metadata['account_id'],
graph.account,
policy.policy_doc,
action,
policy.arn,
Expand Down Expand Up @@ -523,7 +524,10 @@ def print_report(report: Report) -> None:
print('----------------------------------------------------------------')
print('# Principal Mapper Findings')
print()
print('Findings identified in AWS account {}'.format(report.account))
if report.partition == 'aws':
print('Findings identified in AWS account {}'.format(report.account))
else:
print(f'Findings identified in AWS account {report.account} ({report.partition})')
print()
print('Date and Time: {}'.format(report.date_and_time.isoformat()))
print()
Expand Down
4 changes: 3 additions & 1 deletion principalmapper/analysis/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,9 @@ class Report:
utility function to convert the contents of the report to a dictionary object.
"""

def __init__(self, account: str, date_and_time: dt.datetime, findings: List[Finding], source: str):
def __init__(self, account: str, partition: str, date_and_time: dt.datetime, findings: List[Finding], source: str):
    """Builds a Report from the given findings plus metadata about how/when it was generated.

    `partition` was added alongside `account` in v1.2.0 (e.g. 'aws', 'aws-us-gov', 'aws-cn'
    -- presumably; confirm against the graph-generation code).
    """
    # AWS account ID the findings were identified in
    self.account = account
    # AWS partition the account belongs to
    self.partition = partition
    # when the report was generated; callers pass timezone-aware UTC (see gen_report)
    self.date_and_time = date_and_time
    self.findings = findings
    # human-readable description of what produced this report
    self.source = source
Expand All @@ -36,6 +37,7 @@ def as_dictionary(self) -> dict:
"""Produces a dictionary representing this Report's contents."""
return {
'account': self.account,
'partition': self.partition,
'date_and_time': self.date_and_time.isoformat(),
'findings': [x.as_dictionary() for x in self.findings],
'source': self.source
Expand Down
61 changes: 46 additions & 15 deletions principalmapper/common/graphs.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,21 +40,34 @@ class Graph(object):
Graph data to/from files stored on-disk. The actual attributes of each graph/node/edge/policy/group object
will remain the same across the same major+minor version of Principal Mapper, so a graph generated in v1.0.0
should be loadable in v1.0.1, but not v1.1.0.

* **v1.2.0:** Shifted account/partition to arguments of Graph construction
"""

def __init__(self, nodes: list, edges: list, policies: list, groups: list, account: str, partition: str,
             metadata: dict):
    """Constructor.

    Args:
        nodes/edges/policies/groups: the graph's contents.
        account: the AWS account ID the graph was generated from.
        partition: the AWS partition the account belongs to (e.g. 'aws').
        metadata: extra data about the graph; must contain a 'pmapper_version' key.

    Raises:
        ValueError: if any argument is None, if `account`/`partition` are not str,
            if `metadata` is not a dict, or if `metadata` lacks 'pmapper_version'.
    """
    required_args = {'nodes': nodes, 'edges': edges, 'policies': policies, 'groups': groups,
                     'account': account, 'partition': partition, 'metadata': metadata}
    for arg_name, arg_value in required_args.items():
        if arg_value is None:
            # report the *name* of the missing argument; interpolating the value
            # would always print the useless message "Required argument None was None"
            raise ValueError(f'Required argument {arg_name} was None')

    # validate before assigning so a raise leaves no half-trusted attributes behind
    if not isinstance(account, str):
        raise ValueError('Parameter `account` is expected to be str')
    if not isinstance(partition, str):
        raise ValueError('Parameter `partition` is expected to be str')
    if not isinstance(metadata, dict):
        raise ValueError('Parameter `metadata` is expected to be dict')
    if 'pmapper_version' not in metadata:
        raise ValueError('Incomplete metadata input, expected key: "pmapper_version"')

    self.nodes = nodes
    self.edges = edges
    self.policies = policies
    self.groups = groups
    self.account = account
    self.partition = partition
    self.metadata = metadata
Expand All @@ -73,6 +86,7 @@ def store_graph_as_json(self, root_directory: str):

Structure:
| <root_directory parameter>
|---- data.json
|---- metadata.json
|---- graph/
|-------- nodes.json
Expand All @@ -88,13 +102,16 @@ def store_graph_as_json(self, root_directory: str):
graphdir = os.path.join(rootpath, 'graph')
if not os.path.exists(graphdir):
os.makedirs(graphdir, 0o700)
regulardatafilepath = os.path.join(rootpath, 'data.json')
metadatafilepath = os.path.join(rootpath, 'metadata.json')
nodesfilepath = os.path.join(graphdir, 'nodes.json')
edgesfilepath = os.path.join(graphdir, 'edges.json')
policiesfilepath = os.path.join(graphdir, 'policies.json')
groupsfilepath = os.path.join(graphdir, 'groups.json')

old_umask = os.umask(0o077) # block rwx for group/all
with open(regulardatafilepath, 'w') as f:
json.dump({'account': self.account, 'partition': self.partition}, f, indent=4)
with open(metadatafilepath, 'w') as f:
json.dump(self.metadata, f, indent=4)
with open(nodesfilepath, 'w') as f:
Expand All @@ -113,6 +130,7 @@ def create_graph_from_local_disk(cls, root_directory: str):

Structure:
| <root_directory parameter>
|---- data.json
|---- metadata.json
|---- graph/
|-------- nodes.json
Expand All @@ -132,6 +150,7 @@ def create_graph_from_local_disk(cls, root_directory: str):
raise ValueError('Did not find file at: {}'.format(rootpath))
graphdir = os.path.join(rootpath, 'graph')
metadatafilepath = os.path.join(rootpath, 'metadata.json')
regulardatafilepath = os.path.join(rootpath, 'data.json')
nodesfilepath = os.path.join(graphdir, 'nodes.json')
edgesfilepath = os.path.join(graphdir, 'edges.json')
policiesfilepath = os.path.join(graphdir, 'policies.json')
Expand All @@ -142,12 +161,16 @@ def create_graph_from_local_disk(cls, root_directory: str):

current_pmapper_version = packaging.version.parse(principalmapper.__version__)
loaded_graph_version = packaging.version.parse(metadata['pmapper_version'])
if current_pmapper_version.release[0] != loaded_graph_version.release[0] or \
current_pmapper_version.release[1] != loaded_graph_version.release[1]:
raise ValueError('Loaded Graph data was from a different version of Principal Mapper ({}), but the current '
'version of Principal Mapper ({}) may not support it. Either update the stored Graph data '
'and its metadata, or regraph the account.'.format(loaded_graph_version,
current_pmapper_version))
if current_pmapper_version.major != loaded_graph_version.major or current_pmapper_version.minor != loaded_graph_version.minor:
raise ValueError(
f'The loaded Graph data came from a different version of Principal Mapper '
f'({str(loaded_graph_version)}) that is not compatible with this version of Principal Mapper '
f'({str(current_pmapper_version)}). You will need to recreate the organization (`pmapper orgs '
f'create`).'
)

with open(regulardatafilepath) as f:
acctdata = json.load(f) # type: dict

policies = []
with open(policiesfilepath) as f:
Expand Down Expand Up @@ -216,4 +239,12 @@ def create_graph_from_local_disk(cls, root_directory: str):
edges.append(Edge(source=source, destination=destination, reason=edge['reason'],
short_reason=edge['short_reason']))

return Graph(nodes=nodes, edges=edges, policies=policies, groups=groups, metadata=metadata)
return Graph(
nodes=nodes,
edges=edges,
policies=policies,
groups=groups,
account=acctdata.get('account'),
partition=acctdata.get('partition'),
metadata=metadata
)
12 changes: 11 additions & 1 deletion principalmapper/common/nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,8 @@ def get_outbound_edges(self, graph): # -> List[Edge], can't import Edge/Graph i
self.cache['outbound_edges'] = []
if self.is_admin:
for node in graph.nodes:
if node == self:
# skip self-links and links to service-linked roles (not even accessible to admins)
if node == self or node.is_service_linked_role():
continue
else:
self.cache['outbound_edges'].append(
Expand All @@ -119,6 +120,15 @@ def get_outbound_edges(self, graph): # -> List[Edge], can't import Edge/Graph i
self.cache['outbound_edges'].append(edge)
return self.cache['outbound_edges']

def is_service_linked_role(self):
    """Report whether this node looks like an IAM service-linked role.

    A node counts as service-linked when its ARN is a role ARN and the final
    path component (the role name) begins with 'AWSServiceRoleFor'. The answer
    is computed once and memoized under self.cache['is_service_linked_role'].
    """
    if 'is_service_linked_role' not in self.cache:
        answer = False
        if ':role/' in self.arn:
            answer = self.arn.split('/')[-1].startswith('AWSServiceRoleFor')
        self.cache['is_service_linked_role'] = answer
    return self.cache['is_service_linked_role']

def to_dictionary(self) -> dict:
"""Creates a dictionary representation of this Node for storage."""
_pb = self.permissions_boundary
Expand Down
27 changes: 24 additions & 3 deletions principalmapper/common/org_trees.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,10 @@
import os.path
from typing import List, Optional, Tuple

import packaging
import packaging.version

import principalmapper
from principalmapper.common import Edge
from principalmapper.common.policies import Policy

Expand Down Expand Up @@ -86,10 +90,13 @@ def as_dictionary(self) -> dict:
class OrganizationTree(object):
"""The OrganizationGraph object represents an AWS Organization, which is a collection of AWS accounts. These
accounts are organized in a hierarchy (we use a tree for this).

* **v1.2.0:** Added the required 'partition' field
"""

def __init__(self, org_id: str, management_account_id: str, root_ous: List[OrganizationNode],
all_scps: List[Policy], accounts: List[str], edge_list: List[Edge], metadata: dict):
all_scps: List[Policy], accounts: List[str], edge_list: List[Edge], metadata: dict,
partition: str):
self.org_id = org_id
self.management_account_id = management_account_id
self.root_ous = root_ous
Expand All @@ -99,6 +106,7 @@ def __init__(self, org_id: str, management_account_id: str, root_ous: List[Organ
if 'pmapper_version' not in metadata:
raise ValueError('The pmapper_version key/value (str) is required: {"pmapper_version": "..."}')
self.metadata = metadata
self.partition = partition

def as_dictionary(self) -> dict:
"""Returns a dictionary representation of this OrganizationTree object. Used for serialization to disk. We
Expand All @@ -109,7 +117,8 @@ def as_dictionary(self) -> dict:
'management_account_id': self.management_account_id,
'root_ous': [x.as_dictionary() for x in self.root_ous],
'edge_list': [x.to_dictionary() for x in self.edge_list],
'accounts': self.accounts
'accounts': self.accounts,
'partition': self.partition
}

def save_organization_to_disk(self, dirpath: str):
Expand Down Expand Up @@ -163,6 +172,17 @@ def create_from_dir(cls, dirpath: str):
with open(metadata_filepath) as fd:
metadata_obj = json.load(fd)

# verify pmapper_version
current_pmapper_version = packaging.version.parse(principalmapper.__version__)
loaded_orgtree_version = packaging.version.parse(metadata_obj['pmapper_version'])
if current_pmapper_version.major != loaded_orgtree_version.major or current_pmapper_version.minor != loaded_orgtree_version.minor:
raise ValueError(
f'The loaded organization data came from a different version of Principal Mapper '
f'({str(loaded_orgtree_version)}) that is not compatible with this version of Principal Mapper '
f'({str(current_pmapper_version)}). You will need to recreate the organization (`pmapper orgs '
f'create`).'
)

# load the OrganizationX objects
org_datafile_path = os.path.join(dirpath, 'org_data.json')
with open(org_datafile_path) as fd:
Expand All @@ -188,5 +208,6 @@ def _produce_ou(ou_dict: dict) -> OrganizationNode:
[x for x in policies.values()],
org_dictrepr['accounts'],
org_dictrepr['edge_list'],
metadata_obj
metadata_obj,
org_dictrepr['partition']
)
14 changes: 8 additions & 6 deletions principalmapper/graphing/autoscaling_edges.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ class AutoScalingEdgeChecker(EdgeChecker):

def return_edges(self, nodes: List[Node], region_allow_list: Optional[List[str]] = None,
region_deny_list: Optional[List[str]] = None, scps: Optional[List[List[dict]]] = None,
client_args_map: Optional[dict] = None) -> List[Edge]:
client_args_map: Optional[dict] = None, partition: str = 'aws') -> List[Edge]:
"""Fulfills expected method return_edges."""

logger.info('Generating Edges based on EC2 Auto Scaling.')
Expand All @@ -48,7 +48,7 @@ def return_edges(self, nodes: List[Node], region_allow_list: Optional[List[str]]
# Gather projects information for each region
autoscaling_clients = []
if self.session is not None:
as_regions = botocore_tools.get_regions_to_search(self.session, 'autoscaling', region_allow_list, region_deny_list)
as_regions = botocore_tools.get_regions_to_search(self.session, 'autoscaling', region_allow_list, region_deny_list, partition)
for region in as_regions:
autoscaling_clients.append(self.session.create_client('autoscaling', region_name=region, **asargs))

Expand All @@ -67,14 +67,16 @@ def return_edges(self, nodes: List[Node], region_allow_list: Optional[List[str]]
})

except ClientError as ex:
logger.warning('Unable to search region {} for launch configs. The region may be disabled, or the error may '
'be caused by an authorization issue. Continuing.'.format(as_client.meta.region_name))
logger.debug('Exception details: {}'.format(ex))
logger.warning(
f'Unable to search region {as_client.meta.region_name} for launch configs. The region may be '
f'disabled, or the error may be caused by an authorization issue. Continuing.'
)
logger.debug(f'Exception details: {ex}')

result = generate_edges_locally(nodes, scps, launch_configs)

for edge in result:
logger.info("Found new edge: {}".format(edge.describe_edge()))
logger.info(f"Found new edge: {edge.describe_edge()}")

return result

Expand Down
Loading