
Commit 8d3ea31

wip - access controls
TODO:
* cache keys - need to be context aware to prevent incorrect results
* ownership migration upgrade step
* complete unit tests for access controls
* restricted entity hydration and graphql response (chris)
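
The first TODO item (context-aware cache keys) is not implemented in this commit. As a rough illustration only, a context-aware key could fold the calling actor into the key alongside the query parameters, so results computed under one actor's access controls are never served to another. Everything below, including the class and field names, is a hypothetical sketch and not code from this change:

```java
import java.util.Objects;

/** Hypothetical sketch only -- not part of this commit. */
public final class ContextAwareCacheKey {
  private final String actorUrn; // identity taken from the request's OperationContext
  private final String entityName; // entity type being searched
  private final String query; // raw query string

  public ContextAwareCacheKey(String actorUrn, String entityName, String query) {
    this.actorUrn = actorUrn;
    this.entityName = entityName;
    this.query = query;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof ContextAwareCacheKey)) {
      return false;
    }
    ContextAwareCacheKey other = (ContextAwareCacheKey) o;
    // Two actors issuing the same query hash to different keys.
    return Objects.equals(actorUrn, other.actorUrn)
        && Objects.equals(entityName, other.entityName)
        && Objects.equals(query, other.query);
  }

  @Override
  public int hashCode() {
    return Objects.hash(actorUrn, entityName, query);
  }
}
```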
david-leifker committed Feb 21, 2024
1 parent ea4a9b2 commit 8d3ea31
Showing 206 changed files with 2,949 additions and 860 deletions.

@@ -3,6 +3,7 @@
import com.datahub.authentication.Actor;
import com.datahub.authentication.Authentication;
import com.datahub.plugins.auth.authorization.Authorizer;
import io.datahubproject.metadata.context.OperationContext;

/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */
public interface QueryContext {
@@ -25,4 +26,9 @@ default String getActorUrn() {

/** Returns the authorizer used to authorize specific actions. */
Authorizer getAuthorizer();

/**
* @return Returns the operational context
*/
OperationContext getOperationContext();
}
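
The hunks that follow all apply the same mechanical change: resolvers stop passing an `Authentication` as the trailing argument and instead hand the request's `OperationContext`, obtained from the accessor added above, to the entity client as its first argument. A minimal sketch of the new call shape, assuming the import paths for `SortCriterion`, `SortOrder`, `SearchFlags`, and `SearchResult` match the rest of the codebase; the entity name, query, and sort field are illustrative and not taken from any specific file in this commit:

```java
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.query.SearchFlags;
import com.linkedin.metadata.query.filter.SortCriterion;
import com.linkedin.metadata.query.filter.SortOrder;
import com.linkedin.metadata.search.SearchResult;
import graphql.schema.DataFetchingEnvironment;
import io.datahubproject.metadata.context.OperationContext;

public class OperationContextCallShapeExample {

  /** Illustrative only: the call pattern the resolvers below are migrated to. */
  public static SearchResult searchDomains(
      EntityClient entityClient,
      DataFetchingEnvironment environment,
      String query,
      int start,
      int count)
      throws Exception {
    final QueryContext context = environment.getContext();
    // The OperationContext now rides on the GraphQL request context.
    final OperationContext opContext = context.getOperationContext();
    return entityClient.search(
        opContext, // new leading argument
        Constants.DOMAIN_ENTITY_NAME,
        query,
        null, // no filter
        new SortCriterion().setField("urn").setOrder(SortOrder.DESCENDING), // illustrative sort
        start,
        count,
        new SearchFlags().setFulltext(true)); // Authentication is no longer passed
  }
}
```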

@@ -2,17 +2,16 @@

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;

import com.datahub.authentication.Authentication;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.analytics.service.AnalyticsUtil;
import com.linkedin.datahub.graphql.generated.AnalyticsChart;
import com.linkedin.datahub.graphql.generated.AnalyticsChartGroup;
import com.linkedin.datahub.graphql.generated.BarChart;
import com.linkedin.datahub.graphql.generated.BarSegment;
import com.linkedin.datahub.graphql.generated.MetadataAnalyticsInput;
import com.linkedin.datahub.graphql.generated.NamedBar;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.Constants;
@@ -22,6 +21,7 @@
import com.linkedin.metadata.search.utils.QueryUtils;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import io.datahubproject.metadata.context.OperationContext;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -41,24 +41,24 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher<List<Anal

@Override
public final List<AnalyticsChartGroup> get(DataFetchingEnvironment environment) throws Exception {
final Authentication authentication = ResolverUtils.getAuthentication(environment);
final QueryContext context = environment.getContext();
final MetadataAnalyticsInput input =
bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class);

try {
final AnalyticsChartGroup group = new AnalyticsChartGroup();
group.setGroupId("FilteredMetadataAnalytics");
group.setTitle("");
group.setCharts(getCharts(input, authentication));
group.setCharts(getCharts(input, context.getOperationContext()));
return ImmutableList.of(group);
} catch (Exception e) {
log.error("Failed to retrieve metadata analytics!", e);
return Collections.emptyList(); // Simply return nothing.
}
}

private List<AnalyticsChart> getCharts(
MetadataAnalyticsInput input, Authentication authentication) throws Exception {
private List<AnalyticsChart> getCharts(MetadataAnalyticsInput input, OperationContext opContext)
throws Exception {
final List<AnalyticsChart> charts = new ArrayList<>();

List<String> entities = Collections.emptyList();
@@ -77,8 +77,7 @@ private List<AnalyticsChart> getCharts(
}

SearchResult searchResult =
_entityClient.searchAcrossEntities(
entities, query, filter, 0, 0, null, null, authentication);
_entityClient.searchAcrossEntities(opContext, entities, query, filter, 0, 0, null, null);

List<AggregationMetadata> aggregationMetadataList =
searchResult.getMetadata().getAggregations();
@@ -96,7 +95,7 @@ private List<AnalyticsChart> getCharts(
Constants.DOMAIN_ENTITY_NAME,
ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME),
AnalyticsUtil::getDomainName,
authentication);
opContext.getSessionAuthentication());
charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build());
}

@@ -113,7 +112,7 @@ private List<AnalyticsChart> getCharts(
Constants.DATA_PLATFORM_ENTITY_NAME,
ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME),
AnalyticsUtil::getPlatformName,
authentication);
opContext.getSessionAuthentication());
charts.add(
BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build());
}
@@ -132,7 +131,7 @@ private List<AnalyticsChart> getCharts(
ImmutableSet.of(
Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME),
AnalyticsUtil::getTermName,
authentication);
opContext.getSessionAuthentication());
charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build());
}
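
Helpers that still take a raw `Authentication` (like the `AnalyticsUtil` calls above) are bridged through `OperationContext.getSessionAuthentication()` rather than reading the identity off the GraphQL environment. A minimal sketch of that bridge, assuming a hypothetical legacy helper that has not yet been migrated:

```java
import com.datahub.authentication.Authentication;
import io.datahubproject.metadata.context.OperationContext;

final class AuthenticationBridgeExample {

  /** Illustrative only: feed an Authentication-based API from the OperationContext. */
  static void callLegacyHelper(OperationContext opContext) {
    // getSessionAuthentication() exposes the Authentication captured when the
    // OperationContext was created for this request.
    Authentication authentication = opContext.getSessionAuthentication();
    legacyHelper(authentication);
  }

  // Hypothetical stand-in for an API that still expects Authentication directly.
  private static void legacyHelper(Authentication authentication) {
    // ... existing Authentication-based logic ...
  }
}
```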


@@ -65,13 +65,13 @@ public CompletableFuture<ListAccessTokenResult> get(DataFetchingEnvironment envi
.setOrder(SortOrder.DESCENDING);
final SearchResult searchResult =
_entityClient.search(
context.getOperationContext(),
Constants.ACCESS_TOKEN_ENTITY_NAME,
"",
buildFilter(filters, Collections.emptyList()),
sortCriterion,
start,
count,
getAuthentication(environment),
new SearchFlags().setFulltext(true));

final List<AccessTokenMetadata> tokens =

@@ -79,6 +79,7 @@ public CompletableFuture<SearchResults> get(final DataFetchingEnvironment enviro

return UrnSearchResultsMapper.map(
_entityClient.searchAcrossEntities(
context.getOperationContext(),
CONTAINABLE_ENTITY_NAMES,
query,
new Filter()
@@ -90,8 +91,7 @@ public CompletableFuture<SearchResults> get(final DataFetchingEnvironment enviro
start,
count,
null,
null,
context.getAuthentication()));
null));

} catch (Exception e) {
throw new RuntimeException(

@@ -151,14 +151,14 @@ public CompletableFuture<SearchResults> get(DataFetchingEnvironment environment)

return UrnSearchResultsMapper.map(
_entityClient.searchAcrossEntities(
context.getOperationContext(),
finalEntityNames,
sanitizedQuery,
finalFilter,
start,
count,
searchFlags,
null,
ResolverUtils.getAuthentication(environment)));
null));
} catch (Exception e) {
log.error(
"Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}",

@@ -86,6 +86,7 @@ public CompletableFuture<SearchResults> get(final DataFetchingEnvironment enviro

return UrnSearchResultsMapper.map(
_entityClient.searchAcrossEntities(
context.getOperationContext(),
SEARCHABLE_ENTITY_TYPES.stream()
.map(EntityTypeMapper::getName)
.collect(Collectors.toList()),
@@ -97,8 +98,7 @@ public CompletableFuture<SearchResults> get(final DataFetchingEnvironment enviro
start,
count,
null,
null,
context.getAuthentication()));
null));

} catch (Exception e) {
throw new RuntimeException(

@@ -62,6 +62,7 @@ public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment en
// First, get all domain Urns.
final SearchResult gmsResult =
_entityClient.search(
context.getOperationContext(),
Constants.DOMAIN_ENTITY_NAME,
query,
filter,
@@ -70,7 +71,6 @@ public CompletableFuture<ListDomainsResult> get(final DataFetchingEnvironment en
.setOrder(SortOrder.DESCENDING),
start,
count,
context.getAuthentication(),
new SearchFlags().setFulltext(true));

// Now that we have entities we can bind this to a result.

@@ -140,7 +140,7 @@ private Map<Urn, EntityResponse> getTermsWithSameParent(Urn parentNode, QueryCon
final Filter filter = buildParentNodeFilter(parentNode);
final SearchResult searchResult =
_entityClient.filter(
GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication());
context.getOperationContext(), GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000);

final List<Urn> termUrns =
searchResult.getEntities().stream()

@@ -53,12 +53,12 @@ public CompletableFuture<GetRootGlossaryNodesResult> get(
final Filter filter = buildGlossaryEntitiesFilter();
final SearchResult gmsNodesResult =
_entityClient.filter(
context.getOperationContext(),
Constants.GLOSSARY_NODE_ENTITY_NAME,
filter,
null,
start,
count,
context.getAuthentication());
count);

final List<Urn> glossaryNodeUrns =
gmsNodesResult.getEntities().stream()

@@ -53,12 +53,12 @@ public CompletableFuture<GetRootGlossaryTermsResult> get(
final Filter filter = buildGlossaryEntitiesFilter();
final SearchResult gmsTermsResult =
_entityClient.filter(
context.getOperationContext(),
Constants.GLOSSARY_TERM_ENTITY_NAME,
filter,
null,
start,
count,
context.getAuthentication());
count);

final List<Urn> glossaryTermUrns =
gmsTermsResult.getEntities().stream()

@@ -58,6 +58,7 @@ public CompletableFuture<ListGroupsResult> get(final DataFetchingEnvironment env
// First, get all group Urns.
final SearchResult gmsResult =
_entityClient.search(
context.getOperationContext(),
CORP_GROUP_ENTITY_NAME,
query,
null,
@@ -66,7 +67,6 @@ public CompletableFuture<ListGroupsResult> get(final DataFetchingEnvironment env
.setOrder(SortOrder.DESCENDING),
start,
count,
context.getAuthentication(),
new SearchFlags().setFulltext(true));

// Then, get hydrate all groups.

@@ -67,6 +67,7 @@ public CompletableFuture<IngestionSourceExecutionRequests> get(

final SearchResult executionsSearchResult =
_entityClient.filter(
context.getOperationContext(),
Constants.EXECUTION_REQUEST_ENTITY_NAME,
new Filter()
.setOr(
@@ -78,8 +79,7 @@
.setField(REQUEST_TIME_MS_FIELD_NAME)
.setOrder(SortOrder.DESCENDING),
start,
count,
context.getAuthentication());
count);

// 2. Batch fetch the related ExecutionRequests
final Set<Urn> relatedExecRequests =

@@ -67,6 +67,7 @@ public CompletableFuture<ListSecretsResult> get(final DataFetchingEnvironment en
// First, get all secrets
final SearchResult gmsResult =
_entityClient.search(
context.getOperationContext(),
Constants.SECRETS_ENTITY_NAME,
query,
null,
@@ -75,7 +76,6 @@ public CompletableFuture<ListSecretsResult> get(final DataFetchingEnvironment en
.setOrder(SortOrder.DESCENDING),
start,
count,
context.getAuthentication(),
new SearchFlags().setFulltext(true));

// Then, resolve all secrets

@@ -63,13 +63,13 @@ public CompletableFuture<ListIngestionSourcesResult> get(
// First, get all ingestion sources Urns.
final SearchResult gmsResult =
_entityClient.search(
context.getOperationContext(),
Constants.INGESTION_SOURCE_ENTITY_NAME,
query,
buildFilter(filters, Collections.emptyList()),
null,
start,
count,
context.getAuthentication(),
new SearchFlags().setFulltext(true));

// Then, resolve all ingestion sources

@@ -63,12 +63,12 @@ public CompletableFuture<DataProcessInstanceResult> get(DataFetchingEnvironment
final SortCriterion sortCriterion = buildTaskRunsSortCriterion();
final SearchResult gmsResult =
_entityClient.filter(
context.getOperationContext(),
Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME,
filter,
sortCriterion,
start,
count,
context.getAuthentication());
count);
final List<Urn> dataProcessInstanceUrns =
gmsResult.getEntities().stream()
.map(SearchEntity::getEntity)

@@ -69,12 +69,12 @@ public CompletableFuture<DataProcessInstanceResult> get(DataFetchingEnvironment
final SortCriterion sortCriterion = buildTaskRunsSortCriterion();
final SearchResult gmsResult =
_entityClient.filter(
context.getOperationContext(),
Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME,
filter,
sortCriterion,
start,
count,
context.getAuthentication());
count);
final List<Urn> dataProcessInstanceUrns =
gmsResult.getEntities().stream()
.map(SearchEntity::getEntity)

@@ -212,7 +212,7 @@ public static boolean hasChildDomains(
// Limit count to 1 for existence check
final SearchResult searchResult =
entityClient.filter(
DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication());
context.getOperationContext(), DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1);
return (searchResult.getNumEntities() > 0);
}

@@ -226,7 +226,7 @@ private static Map<Urn, EntityResponse> getDomainsByNameAndParent(

final SearchResult searchResult =
entityClient.filter(
DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication());
context.getOperationContext(), DOMAIN_ENTITY_NAME, filter, null, 0, 1000);

final Set<Urn> domainUrns =
searchResult.getEntities().stream()

@@ -60,13 +60,13 @@ public CompletableFuture<ListOwnershipTypesResult> get(DataFetchingEnvironment e

final SearchResult gmsResult =
_entityClient.search(
context.getOperationContext(),
Constants.OWNERSHIP_TYPE_ENTITY_NAME,
query,
buildFilter(filters, Collections.emptyList()),
DEFAULT_SORT_CRITERION,
start,
count,
context.getAuthentication(),
new SearchFlags().setFulltext(true));

final ListOwnershipTypesResult result = new ListOwnershipTypesResult();

@@ -42,7 +42,7 @@ public CompletableFuture<ListPoliciesResult> get(final DataFetchingEnvironment e
final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery();

return _policyFetcher
.fetchPolicies(start, query, count, context.getAuthentication())
.fetchPolicies(context.getOperationContext(), start, query, count)
.thenApply(
policyFetchResult -> {
final ListPoliciesResult result = new ListPoliciesResult();

@@ -57,13 +57,13 @@ public CompletableFuture<ListPostsResult> get(final DataFetchingEnvironment envi
// First, get all Post Urns.
final SearchResult gmsResult =
_entityClient.search(
context.getOperationContext(),
POST_ENTITY_NAME,
query,
null,
sortCriterion,
start,
count,
context.getAuthentication(),
new SearchFlags().setFulltext(true));

// Then, get and hydrate all Posts.

(Remaining changed files in this commit are not shown.)