
Commit

Merge branch 'master' into feat_glue_empty_databases
skrydal authored Jun 25, 2024
2 parents 5a72510 + 5735d9e commit 3111da0
Showing 390 changed files with 10,934 additions and 2,669 deletions.
16 changes: 8 additions & 8 deletions build.gradle
@@ -32,29 +32,29 @@ buildscript {
 
   ext.junitJupiterVersion = '5.6.1'
   // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md
-  ext.pegasusVersion = '29.51.6'
+  ext.pegasusVersion = '29.57.0'
   ext.mavenVersion = '3.6.3'
-  ext.springVersion = '6.1.4'
-  ext.springBootVersion = '3.2.3'
-  ext.springKafkaVersion = '3.1.2'
+  ext.springVersion = '6.1.5'
+  ext.springBootVersion = '3.2.6'
+  ext.springKafkaVersion = '3.1.6'
   ext.openTelemetryVersion = '1.18.0'
   ext.neo4jVersion = '5.14.0'
   ext.neo4jTestVersion = '5.14.0'
   ext.neo4jApocVersion = '5.14.0'
   ext.testContainersVersion = '1.17.4'
   ext.elasticsearchVersion = '2.11.1' // ES 7.10, Opensearch 1.x, 2.x
   ext.jacksonVersion = '2.15.3'
-  ext.jettyVersion = '11.0.19'
-  ext.playVersion = '2.8.21'
-  ext.log4jVersion = '2.19.0'
+  ext.jettyVersion = '11.0.21'
+  ext.playVersion = '2.8.22'
+  ext.log4jVersion = '2.23.1'
   ext.slf4jVersion = '1.7.36'
   ext.logbackClassic = '1.4.14'
   ext.hadoop3Version = '3.3.5'
   ext.kafkaVersion = '5.5.15'
   ext.hazelcastVersion = '5.3.6'
   ext.ebeanVersion = '12.16.1'
   ext.googleJavaFormatVersion = '1.18.1'
-  ext.openLineageVersion = '1.14.0'
+  ext.openLineageVersion = '1.16.0'
   ext.logbackClassicJava8 = '1.2.12'
 
   ext.docker_registry = 'acryldata'
2 changes: 1 addition & 1 deletion datahub-frontend/app/controllers/Application.java
@@ -155,7 +155,7 @@ AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(req
         .setBody(
             new InMemoryBodyWritable(
                 ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()),
-                "application/json"))
+                request.contentType().orElse("application/json")))
         .setRequestTimeout(Duration.ofSeconds(120))
         .execute()
         .thenApply(
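The change above stops hard-coding the proxied body's media type: the caller's Content-Type is forwarded and JSON is only the fallback. A minimal, self-contained illustration of that fallback (plain Java, not the frontend proxy code itself; the media types shown are hypothetical examples):

import java.util.Optional;

public class ContentTypeFallbackSketch {
  // Mirrors request.contentType().orElse("application/json") from the diff above.
  static String resolveContentType(Optional<String> requestContentType) {
    return requestContentType.orElse("application/json");
  }

  public static void main(String[] args) {
    // A caller that declares its own Content-Type keeps it...
    System.out.println(resolveContentType(Optional.of("application/graphql")));
    // ...while a request without one still defaults to JSON, as before.
    System.out.println(resolveContentType(Optional.empty()));
  }
}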
6 changes: 5 additions & 1 deletion datahub-frontend/conf/application.conf
@@ -38,8 +38,12 @@ jwt {
 play.server.provider = server.CustomAkkaHttpServerProvider
 play.http.server.akka.max-header-count = 64
 play.http.server.akka.max-header-count = ${?DATAHUB_AKKA_MAX_HEADER_COUNT}
-play.server.akka.max-header-size = 8k
+# max-header-size is reportedly no longer used
+play.server.akka.max-header-size = 32k
 play.server.akka.max-header-size = ${?DATAHUB_AKKA_MAX_HEADER_VALUE_LENGTH}
+# max header value length seems to impact the actual limit
+play.server.akka.max-header-value-length = 32k
+play.server.akka.max-header-value-length = ${?DATAHUB_AKKA_MAX_HEADER_VALUE_LENGTH}
 
 # Update AUTH_COOKIE_SAME_SITE and AUTH_COOKIE_SECURE in order to change how authentication cookies
 # are configured. If you wish cookies to be sent in first and third party contexts, set
GmsGraphQLEngine.java
@@ -121,6 +121,8 @@
 import com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver;
 import com.linkedin.datahub.graphql.resolvers.assertion.DeleteAssertionResolver;
 import com.linkedin.datahub.graphql.resolvers.assertion.EntityAssertionsResolver;
+import com.linkedin.datahub.graphql.resolvers.assertion.ReportAssertionResultResolver;
+import com.linkedin.datahub.graphql.resolvers.assertion.UpsertCustomAssertionResolver;
 import com.linkedin.datahub.graphql.resolvers.auth.CreateAccessTokenResolver;
 import com.linkedin.datahub.graphql.resolvers.auth.DebugAccessResolver;
 import com.linkedin.datahub.graphql.resolvers.auth.GetAccessTokenMetadataResolver;
@@ -377,6 +379,7 @@
 import com.linkedin.metadata.query.filter.SortCriterion;
 import com.linkedin.metadata.query.filter.SortOrder;
 import com.linkedin.metadata.recommendation.RecommendationsService;
+import com.linkedin.metadata.service.AssertionService;
 import com.linkedin.metadata.service.BusinessAttributeService;
 import com.linkedin.metadata.service.DataProductService;
 import com.linkedin.metadata.service.ERModelRelationshipService;
@@ -454,6 +457,7 @@ public class GmsGraphQLEngine {
   private final FormService formService;
   private final RestrictedService restrictedService;
   private ConnectionService connectionService;
+  private AssertionService assertionService;
 
   private final BusinessAttributeService businessAttributeService;
   private final FeatureFlags featureFlags;
@@ -575,6 +579,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
     this.formService = args.formService;
     this.restrictedService = args.restrictedService;
     this.connectionService = args.connectionService;
+    this.assertionService = args.assertionService;
 
     this.businessAttributeService = args.businessAttributeService;
     this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
@@ -1220,6 +1225,10 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
             "createTestConnectionRequest",
             new CreateTestConnectionRequestResolver(
                 this.entityClient, this.ingestionConfiguration))
+        .dataFetcher(
+            "upsertCustomAssertion", new UpsertCustomAssertionResolver(assertionService))
+        .dataFetcher(
+            "reportAssertionResult", new ReportAssertionResultResolver(assertionService))
         .dataFetcher(
             "deleteAssertion",
             new DeleteAssertionResolver(this.entityClient, this.entityService))
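For context on what the two new dataFetcher entries do: in graphql-java, a RuntimeWiring.Builder maps a field name on a GraphQL type (here, mutation fields on Mutation) to a DataFetcher instance that executes it. A hedged, standalone sketch of that mechanism (generic graphql-java API only; the placeholder fetcher and its return value are illustrative, not DataHub's actual resolvers):

import graphql.schema.DataFetcher;
import graphql.schema.idl.RuntimeWiring;

public class MutationWiringSketch {
  public static RuntimeWiring buildWiring() {
    // Placeholder fetcher standing in for UpsertCustomAssertionResolver; the real resolver
    // would validate input, check privileges, and write the assertion aspects.
    DataFetcher<String> upsertCustomAssertion = env -> "urn:li:assertion:example";

    // Bind the "upsertCustomAssertion" mutation field to the fetcher, which is what the
    // .dataFetcher(...) calls in the diff above do on the shared RuntimeWiring.Builder.
    return RuntimeWiring.newRuntimeWiring()
        .type("Mutation", type -> type.dataFetcher("upsertCustomAssertion", upsertCustomAssertion))
        .build();
  }
}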
GmsGraphQLEngineArgs.java
@@ -25,6 +25,7 @@
 import com.linkedin.metadata.graph.SiblingGraphService;
 import com.linkedin.metadata.models.registry.EntityRegistry;
 import com.linkedin.metadata.recommendation.RecommendationsService;
+import com.linkedin.metadata.service.AssertionService;
 import com.linkedin.metadata.service.BusinessAttributeService;
 import com.linkedin.metadata.service.DataProductService;
 import com.linkedin.metadata.service.ERModelRelationshipService;
@@ -86,6 +87,7 @@ public class GmsGraphQLEngineArgs {
   boolean graphQLQueryIntrospectionEnabled;
   BusinessAttributeService businessAttributeService;
   ConnectionService connectionService;
+  AssertionService assertionService;
 
   // any fork specific args should go below this line
 }
FeatureFlags.java
@@ -20,4 +20,5 @@ public class FeatureFlags {
   private boolean nestedDomainsEnabled = false;
   private boolean schemaFieldEntityFetchEnabled = false;
   private boolean businessAttributeEntityEnabled = false;
+  private boolean dataContractsEnabled = false;
 }
ResolverUtils.java
@@ -15,6 +15,7 @@
 import com.linkedin.datahub.graphql.generated.AndFilterInput;
 import com.linkedin.datahub.graphql.generated.FacetFilterInput;
 import com.linkedin.datahub.graphql.resolvers.search.SearchUtils;
+import com.linkedin.metadata.aspect.AspectRetriever;
 import com.linkedin.metadata.query.filter.Condition;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
@@ -112,10 +113,11 @@ public static Map<String, String> buildFacetFilters(
     return facetFilters;
   }
 
-  public static List<Criterion> criterionListFromAndFilter(List<FacetFilterInput> andFilters) {
+  public static List<Criterion> criterionListFromAndFilter(
+      List<FacetFilterInput> andFilters, @Nullable AspectRetriever aspectRetriever) {
     return andFilters != null && !andFilters.isEmpty()
         ? andFilters.stream()
-            .map(filter -> criterionFromFilter(filter))
+            .map(filter -> criterionFromFilter(filter, aspectRetriever))
             .collect(Collectors.toList())
         : Collections.emptyList();
   }
@@ -124,21 +126,24 @@ public static List<Criterion> criterionListFromAndFilter(List<FacetFilterInput>
   // conjunctive criterion
   // arrays, rather than just one for the AND case.
   public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr(
-      @Nonnull List<AndFilterInput> orFilters) {
+      @Nonnull List<AndFilterInput> orFilters, @Nullable AspectRetriever aspectRetriever) {
     return new ConjunctiveCriterionArray(
         orFilters.stream()
             .map(
                 orFilter -> {
                   CriterionArray andCriterionForOr =
-                      new CriterionArray(criterionListFromAndFilter(orFilter.getAnd()));
+                      new CriterionArray(
+                          criterionListFromAndFilter(orFilter.getAnd(), aspectRetriever));
                   return new ConjunctiveCriterion().setAnd(andCriterionForOr);
                 })
             .collect(Collectors.toList()));
   }
 
   @Nullable
   public static Filter buildFilter(
-      @Nullable List<FacetFilterInput> andFilters, @Nullable List<AndFilterInput> orFilters) {
+      @Nullable List<FacetFilterInput> andFilters,
+      @Nullable List<AndFilterInput> orFilters,
+      @Nullable AspectRetriever aspectRetriever) {
     if ((andFilters == null || andFilters.isEmpty())
         && (orFilters == null || orFilters.isEmpty())) {
       return null;
@@ -147,30 +152,33 @@ public static Filter buildFilter(
     // Or filters are the new default. We will check them first.
     // If we have OR filters, we need to build a series of CriterionArrays
     if (orFilters != null && !orFilters.isEmpty()) {
-      return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters));
+      return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters, aspectRetriever));
     }
 
     // If or filters are not set, someone may be using the legacy and filters
-    final List<Criterion> andCriterions = criterionListFromAndFilter(andFilters);
+    final List<Criterion> andCriterions = criterionListFromAndFilter(andFilters, aspectRetriever);
     return new Filter()
         .setOr(
             new ConjunctiveCriterionArray(
                 new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions))));
   }
 
-  public static Criterion criterionFromFilter(final FacetFilterInput filter) {
-    return criterionFromFilter(filter, false);
+  public static Criterion criterionFromFilter(
+      final FacetFilterInput filter, @Nullable AspectRetriever aspectRetriever) {
+    return criterionFromFilter(filter, false, aspectRetriever);
   }
 
   // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model)
   public static Criterion criterionFromFilter(
-      final FacetFilterInput filter, final Boolean skipKeywordSuffix) {
+      final FacetFilterInput filter,
+      final Boolean skipKeywordSuffix,
+      @Nullable AspectRetriever aspectRetriever) {
     Criterion result = new Criterion();
 
     if (skipKeywordSuffix) {
       result.setField(filter.getField());
     } else {
-      result.setField(getFilterField(filter.getField(), skipKeywordSuffix));
+      result.setField(getFilterField(filter.getField(), skipKeywordSuffix, aspectRetriever));
     }
 
     // `value` is deprecated in place of `values`- this is to support old query patterns. If values
@@ -205,11 +213,13 @@ public static Criterion criterionFromFilter(
   }
 
   private static String getFilterField(
-      final String originalField, final boolean skipKeywordSuffix) {
+      final String originalField,
+      final boolean skipKeywordSuffix,
+      @Nullable AspectRetriever aspectRetriever) {
     if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) {
       return originalField;
     }
-    return ESUtils.toKeywordField(originalField, skipKeywordSuffix);
+    return ESUtils.toKeywordField(originalField, skipKeywordSuffix, aspectRetriever);
   }
 
   public static Filter buildFilterWithUrns(@Nonnull Set<Urn> urns, @Nullable Filter inputFilters) {
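The practical effect for callers is one extra argument threaded down from the request's operation context; presumably this lets the keyword-suffix logic consult aspect/field metadata rather than naming conventions alone. A hedged caller-side sketch (assumes the datahub-graphql-core classes named in this commit are on the classpath; the helper class itself is hypothetical, mirroring the resolver change shown below):

import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.generated.AndFilterInput;
import com.linkedin.datahub.graphql.generated.FacetFilterInput;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.metadata.query.filter.Filter;
import java.util.List;
import javax.annotation.Nullable;

public class FilterCallSketch {
  // Hypothetical helper: builds a Filter the post-change way, passing the AspectRetriever
  // taken from the request's operation context into ResolverUtils.buildFilter.
  @Nullable
  public static Filter buildFilter(
      QueryContext context,
      @Nullable List<FacetFilterInput> andFilters,
      @Nullable List<AndFilterInput> orFilters) {
    return ResolverUtils.buildFilter(
        andFilters,
        orFilters,
        context.getOperationContext().getAspectRetriever());
  }
}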
AssertionRunEventResolver.java
@@ -15,6 +15,7 @@
 import com.linkedin.datahub.graphql.types.dataset.mappers.AssertionRunEventMapper;
 import com.linkedin.entity.client.EntityClient;
 import com.linkedin.metadata.Constants;
+import com.linkedin.metadata.aspect.AspectRetriever;
 import com.linkedin.metadata.aspect.EnvelopedAspect;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterion;
 import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray;
@@ -67,7 +68,10 @@ public CompletableFuture<AssertionRunEventsResult> get(DataFetchingEnvironment e
                 maybeStartTimeMillis,
                 maybeEndTimeMillis,
                 maybeLimit,
-                buildFilter(maybeFilters, maybeStatus));
+                buildFilter(
+                    maybeFilters,
+                    maybeStatus,
+                    context.getOperationContext().getAspectRetriever()));
 
         // Step 2: Bind profiles into GraphQL strong types.
         List<AssertionRunEvent> runEvents =
@@ -120,7 +124,9 @@ public CompletableFuture<AssertionRunEventsResult> get(DataFetchingEnvironment e
 
   @Nullable
   public static Filter buildFilter(
-      @Nullable FilterInput filtersInput, @Nullable final String status) {
+      @Nullable FilterInput filtersInput,
+      @Nullable final String status,
+      @Nullable AspectRetriever aspectRetriever) {
     if (filtersInput == null && status == null) {
       return null;
     }
@@ -141,7 +147,7 @@ public static Filter buildFilter(
                 .setAnd(
                     new CriterionArray(
                         facetFilters.stream()
-                            .map(filter -> criterionFromFilter(filter, true))
+                            .map(filter -> criterionFromFilter(filter, true, aspectRetriever))
                             .collect(Collectors.toList())))));
   }
 }
AssertionUtils.java (new file)
@@ -0,0 +1,27 @@
+package com.linkedin.datahub.graphql.resolvers.assertion;
+
+import com.datahub.authorization.ConjunctivePrivilegeGroup;
+import com.datahub.authorization.DisjunctivePrivilegeGroup;
+import com.google.common.collect.ImmutableList;
+import com.linkedin.common.urn.Urn;
+import com.linkedin.datahub.graphql.QueryContext;
+import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
+import com.linkedin.metadata.authorization.PoliciesConfig;
+
+public class AssertionUtils {
+  public static boolean isAuthorizedToEditAssertionFromAssertee(
+      final QueryContext context, final Urn asserteeUrn) {
+    final DisjunctivePrivilegeGroup orPrivilegeGroups =
+        new DisjunctivePrivilegeGroup(
+            ImmutableList.of(
+                AuthorizationUtils.ALL_PRIVILEGES_GROUP,
+                new ConjunctivePrivilegeGroup(
+                    ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType()))));
+    return AuthorizationUtils.isAuthorized(
+        context.getAuthorizer(),
+        context.getActorUrn(),
+        asserteeUrn.getEntityType(),
+        asserteeUrn.toString(),
+        orPrivilegeGroups);
+  }
+}
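The new helper centralizes the privilege check for the custom-assertion mutations: the actor must hold either all privileges or EDIT_ENTITY_ASSERTIONS on the entity being asserted (the "assertee"). A hedged usage sketch (hypothetical resolver-side code, not part of this commit; the exception class and package names are assumed from the module's existing conventions):

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.resolvers.assertion.AssertionUtils;

public class AssertionAuthSketch {
  // Hypothetical guard, mirroring how a mutation resolver would use the helper before
  // writing any assertion aspects for the asserted entity.
  static void checkCanEditAssertions(QueryContext context, Urn asserteeUrn) {
    if (!AssertionUtils.isAuthorizedToEditAssertionFromAssertee(context, asserteeUrn)) {
      throw new AuthorizationException(
          "Unauthorized to perform this action. Please contact your DataHub administrator.");
    }
  }
}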

0 comments on commit 3111da0
