
Commit 57a5095

Merge branch 'master' into pyiceberg_dependency_update

treff7es authored Nov 28, 2024
2 parents 151627a + ecba224
Showing 351 changed files with 15,782 additions and 4,357 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/build-and-test.yml
@@ -83,6 +83,7 @@ jobs:
- uses: gradle/actions/setup-gradle@v3
- name: Gradle build (and test) for NOT metadata ingestion
if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }}
+        # datahub-schematron:cli excluded due to dependency on metadata-ingestion
run: |
./gradlew build \
-x :metadata-ingestion:build \
@@ -100,6 +101,7 @@ jobs:
-x :metadata-ingestion-modules:gx-plugin:check \
-x :datahub-frontend:build \
-x :datahub-web-react:build \
+          -x :metadata-integration:java:datahub-schematron:cli:test \
--parallel
- name: Gradle build (and test) for frontend
if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }}
1 change: 1 addition & 0 deletions .github/workflows/check-datahub-jars.yml
@@ -40,4 +40,5 @@ jobs:
- name: check ${{ matrix.command }} jar
run: |
./gradlew :metadata-integration:java:${{ matrix.command }}:build --info
./gradlew :metadata-integration:java:${{ matrix.command }}:checkShadowJar
+          ./gradlew :metadata-integration:java:${{ matrix.command }}:javadoc
3 changes: 2 additions & 1 deletion .github/workflows/pr-labeler.yml
@@ -45,7 +45,8 @@ jobs:
"Salman-Apptware",
"mayurinehate",
"noggi",
"skrydal"
"skrydal",
"kevinkarchacryl"
]'),
github.actor
)
13 changes: 11 additions & 2 deletions build.gradle
@@ -48,6 +48,7 @@ buildscript {
// see also datahub-frontend/play.gradle
ext.playVersion = '2.8.22'
ext.playScalaVersion = '2.13'
+  ext.akkaVersion = '2.6.21' // 2.7.0+ has incompatible license
ext.log4jVersion = '2.23.1'
ext.slf4jVersion = '1.7.36'
ext.logbackClassic = '1.4.14'
@@ -56,7 +57,7 @@ buildscript {
ext.hazelcastVersion = '5.3.6'
ext.ebeanVersion = '15.5.2'
ext.googleJavaFormatVersion = '1.18.1'
-  ext.openLineageVersion = '1.19.0'
+  ext.openLineageVersion = '1.24.2'
ext.logbackClassicJava8 = '1.2.12'

ext.docker_registry = 'acryldata'
@@ -105,7 +106,14 @@ project.ext.spec = [
]

project.ext.externalDependency = [
-  'akkaHttp': "com.typesafe.akka:akka-http-core_$playScalaVersion:10.2.10",
+  'akkaHttp': "com.typesafe.akka:akka-http-core_$playScalaVersion:10.2.10", // max version due to licensing
+  'akkaActor': "com.typesafe.akka:akka-actor_$playScalaVersion:$akkaVersion",
+  'akkaStream': "com.typesafe.akka:akka-stream_$playScalaVersion:$akkaVersion",
+  'akkaActorTyped': "com.typesafe.akka:akka-actor-typed_$playScalaVersion:$akkaVersion",
+  'akkaSlf4j': "com.typesafe.akka:akka-slf4j_$playScalaVersion:$akkaVersion",
+  'akkaJackson': "com.typesafe.akka:akka-serialization-jackson_$playScalaVersion:$akkaVersion",
+  'akkaParsing': "com.typesafe.akka:akka-parsing_$playScalaVersion:$akkaVersion",
+  'akkaProtobuf': "com.typesafe.akka:akka-protobuf-v3_$playScalaVersion:$akkaVersion",
'antlr4Runtime': 'org.antlr:antlr4-runtime:4.9.3',
'antlr4': 'org.antlr:antlr4:4.9.3',
'assertJ': 'org.assertj:assertj-core:3.11.1',
@@ -350,6 +358,7 @@ allprojects {
}
}
}

}

configure(subprojects.findAll {! it.name.startsWith('spark-lineage')}) {
5 changes: 3 additions & 2 deletions datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
@@ -130,8 +130,6 @@ public Object perform(
CallContext ctx = ctxResult.getFirst();
Result result = (Result) ctxResult.getSecond();

-    setContextRedirectUrl(ctx);

// Handle OIDC authentication errors.
if (OidcResponseErrorHandler.isError(ctx)) {
return OidcResponseErrorHandler.handleError(ctx);
@@ -192,6 +190,9 @@ private Pair<CallContext, Object> superPerform(
}
}

+        // Set the redirect url from cookie before creating action
+        setContextRedirectUrl(ctx);

action = this.redirectToOriginallyRequestedUrl(ctx, defaultUrl);
}
} catch (RuntimeException var20) {
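The two hunks above relocate setContextRedirectUrl(ctx) out of the shared entry point and into superPerform, so the redirect cookie is read only after error handling and session setup succeed, immediately before the redirect action is built. A minimal sketch of the resulting order, with the surrounding pac4j plumbing elided (a condensed, hypothetical method; the real flow spans perform and superPerform):

```java
// Sketch only: condensed from the diff above, not the actual pac4j callback code.
private Object performCallback(CallContext ctx, String defaultUrl) {
  if (OidcResponseErrorHandler.isError(ctx)) {
    // Error responses now short-circuit before the redirect cookie is touched.
    return OidcResponseErrorHandler.handleError(ctx);
  }
  // ... credential validation and session creation elided ...

  // Set the redirect url from cookie before creating action (new position).
  setContextRedirectUrl(ctx);
  return redirectToOriginallyRequestedUrl(ctx, defaultUrl);
}
```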
7 changes: 7 additions & 0 deletions datahub-frontend/play.gradle
@@ -55,6 +55,13 @@ dependencies {
implementation externalDependency.antlr4Runtime
implementation externalDependency.antlr4
implementation externalDependency.akkaHttp
+  implementation externalDependency.akkaActor
+  implementation externalDependency.akkaStream
+  implementation externalDependency.akkaActorTyped
+  implementation externalDependency.akkaSlf4j
+  implementation externalDependency.akkaJackson
+  implementation externalDependency.akkaParsing
+  implementation externalDependency.akkaProtobuf

implementation externalDependency.jerseyCore
implementation externalDependency.jerseyGuava
@@ -63,6 +63,7 @@
import com.linkedin.datahub.graphql.generated.Domain;
import com.linkedin.datahub.graphql.generated.ERModelRelationship;
import com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties;
+ import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.EntityPath;
import com.linkedin.datahub.graphql.generated.EntityRelationship;
import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy;
@@ -312,6 +313,7 @@
import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver;
+ import com.linkedin.datahub.graphql.resolvers.type.ResolvedActorResolver;
import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver;
@@ -1730,12 +1732,22 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) {
.type(
"InstitutionalMemoryMetadata",
typeWiring ->
-                typeWiring.dataFetcher(
-                    "author",
-                    new LoadableTypeResolver<>(
-                        corpUserType,
-                        (env) ->
-                            ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())))
+                typeWiring
+                    .dataFetcher(
+                        "author",
+                        new LoadableTypeResolver<>(
+                            corpUserType,
+                            (env) ->
+                                ((InstitutionalMemoryMetadata) env.getSource())
+                                    .getAuthor()
+                                    .getUrn()))
+                    .dataFetcher(
+                        "actor",
+                        new EntityTypeResolver(
+                            this.entityTypes,
+                            (env) ->
+                                (Entity)
+                                    ((InstitutionalMemoryMetadata) env.getSource()).getActor())))
.type(
"DatasetStatsSummary",
typeWiring ->
@@ -2242,6 +2254,7 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) {
"HyperParameterValueType",
typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver()))
.type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver()))
.type("ResolvedActor", typeWiring -> typeWiring.typeResolver(new ResolvedActorResolver()))
.type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver()))
.type(
"TimeSeriesAspect",
@@ -84,8 +84,21 @@ private TimeSeriesChart getActiveUsersTimeSeriesChart(
final DateTime end,
final String title,
final DateInterval interval) {
-    final DateRange dateRange =
-        new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis()));

+    final DateRange dateRange;

+    // adjust month to show 1st of month rather than last day of previous month
+    if (interval == DateInterval.MONTH) {
+      dateRange =
+          new DateRange(
+              String.valueOf(beginning.plusDays(1).getMillis()), // Shift start by 1 day
+              String.valueOf(end.plusDays(1).getMillis()) // Shift end by 1 day
+              );
+    } else {
+      // week display starting Sundays
+      dateRange =
+          new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis()));
+    }

final List<NamedLine> timeSeriesLines =
_analyticsService.getTimeseriesChart(
@@ -96,6 +109,7 @@ private TimeSeriesChart getActiveUsersTimeSeriesChart(
ImmutableMap.of(),
Collections.emptyMap(),
Optional.of("browserId"));

return TimeSeriesChart.builder()
.setTitle(title)
.setDateRange(dateRange)
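The month branch above shifts both range endpoints forward by one day so that a month bucket is labeled with the 1st of the month it covers rather than the last day of the previous month. A self-contained Joda-Time sketch of what the shift does (Joda-Time is assumed from the DateTime/getMillis() API used above; the dates are illustrative):

```java
import org.joda.time.DateTime;

public class MonthShiftSketch {
  public static void main(String[] args) {
    // A month boundary that falls on the last day of the previous month...
    DateTime beginning = new DateTime(2024, 10, 31, 0, 0);
    // ...moves into the month the bucket is meant to label.
    DateTime shifted = beginning.plusDays(1);
    System.out.println(beginning.toLocalDate()); // 2024-10-31
    System.out.println(shifted.toLocalDate());   // 2024-11-01
  }
}
```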
@@ -1,19 +1,14 @@
package com.linkedin.datahub.graphql.analytics.service;

+ import static com.linkedin.metadata.Constants.CORP_USER_EDITABLE_INFO_ASPECT_NAME;
+ import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
import static com.linkedin.metadata.Constants.CORP_USER_INFO_ASPECT_NAME;

import com.google.common.collect.ImmutableSet;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.dashboard.DashboardInfo;
- import com.linkedin.datahub.graphql.generated.BarSegment;
- import com.linkedin.datahub.graphql.generated.Cell;
- import com.linkedin.datahub.graphql.generated.Entity;
- import com.linkedin.datahub.graphql.generated.EntityProfileParams;
- import com.linkedin.datahub.graphql.generated.LinkParams;
- import com.linkedin.datahub.graphql.generated.NamedBar;
- import com.linkedin.datahub.graphql.generated.Row;
- import com.linkedin.datahub.graphql.generated.SearchParams;
+ import com.linkedin.datahub.graphql.generated.*;
import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
import com.linkedin.dataplatform.DataPlatformInfo;
import com.linkedin.dataset.DatasetProperties;
@@ -22,6 +17,7 @@
import com.linkedin.entity.EnvelopedAspect;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.glossary.GlossaryTermInfo;
+ import com.linkedin.identity.CorpUserEditableInfo;
import com.linkedin.identity.CorpUserInfo;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.key.GlossaryTermKey;
@@ -35,6 +31,7 @@
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
+ import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
Expand Down Expand Up @@ -169,36 +166,79 @@ public static void convertToUserInfoRows(
final Map<Urn, EntityResponse> gmsResponseByUser =
entityClient.batchGetV2(
opContext,
-              CORP_USER_INFO_ASPECT_NAME,
+              CORP_USER_ENTITY_NAME,
              userUrns,
-              ImmutableSet.of(CORP_USER_INFO_ASPECT_NAME));
-      final Map<Urn, CorpUserInfo> urnToCorpUserInfo =
+              ImmutableSet.of(CORP_USER_INFO_ASPECT_NAME, CORP_USER_EDITABLE_INFO_ASPECT_NAME));
+      final Stream<Map.Entry<Urn, EntityResponse>> entityStream =
gmsResponseByUser.entrySet().stream()
.filter(
entry ->
entry.getValue() != null
&& entry.getValue().getAspects().containsKey(CORP_USER_INFO_ASPECT_NAME))
.collect(
Collectors.toMap(
Map.Entry::getKey,
entry ->
&& (entry.getValue().getAspects().containsKey(CORP_USER_INFO_ASPECT_NAME)
|| entry
.getValue()
.getAspects()
.containsKey(CORP_USER_EDITABLE_INFO_ASPECT_NAME)));
final Map<Urn, Pair<CorpUserInfo, CorpUserEditableInfo>> urnToCorpUserInfo =
entityStream.collect(
Collectors.toMap(
Map.Entry::getKey,
entry -> {
CorpUserInfo userInfo = null;
CorpUserEditableInfo editableInfo = null;
try {
userInfo =
new CorpUserInfo(
entry
.getValue()
.getAspects()
.get(CORP_USER_INFO_ASPECT_NAME)
.getValue()
.data())));
.data());
} catch (Exception e) {
// nothing to do
}
try {

editableInfo =
new CorpUserEditableInfo(
entry
.getValue()
.getAspects()
.get(CORP_USER_EDITABLE_INFO_ASPECT_NAME)
.getValue()
.data());
} catch (Exception e) {
// nothing to do
}

return Pair.of(userInfo, editableInfo);
}));
// Populate a row with the user link, title, and email.
rows.forEach(
row -> {
Urn urn = UrnUtils.getUrn(row.getCells().get(0).getValue());
EntityResponse response = gmsResponseByUser.get(urn);
String maybeDisplayName = response != null ? getUserName(response).orElse(null) : null;
-          String maybeEmail =
-              urnToCorpUserInfo.containsKey(urn) ? urnToCorpUserInfo.get(urn).getEmail() : null;
-          String maybeTitle =
-              urnToCorpUserInfo.containsKey(urn) ? urnToCorpUserInfo.get(urn).getTitle() : null;
+          String maybeEmail = null;
+          String maybeTitle = null;
+          if (urnToCorpUserInfo.containsKey(urn)) {
+            Pair<CorpUserInfo, CorpUserEditableInfo> pair = urnToCorpUserInfo.get(urn);
+            if (pair.getLeft() != null) {
+              CorpUserInfo userInfo = pair.getLeft();
+              maybeEmail = userInfo.getEmail();
+              maybeTitle = userInfo.getTitle();
+            }
+            if (pair.getRight() != null) {
+              CorpUserEditableInfo userInfo = pair.getRight();
+              if (maybeEmail == null) {
+                maybeEmail = userInfo.getEmail();
+              }
+              if (maybeTitle == null) {
+                maybeTitle = userInfo.getTitle();
+              }
+            }
+          }
if (maybeDisplayName != null) {
row.getCells().get(0).setValue(maybeDisplayName);
}
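convertToUserInfoRows now fetches both the corpUserInfo and corpUserEditableInfo aspects in a single batchGetV2 call and resolves email and title per field, preferring the standard aspect and falling back to the user-editable one. A minimal sketch of that precedence, with plain strings standing in for the two aspect records (the left/right Pair mirrors the one in the diff; the names and values here are illustrative):

```java
import org.apache.commons.lang3.tuple.Pair;

public class AspectFallbackSketch {
  // Left = field from CorpUserInfo, right = the same field from CorpUserEditableInfo.
  static String preferInfoAspect(Pair<String, String> fields) {
    String value = fields.getLeft();
    if (value == null) {
      value = fields.getRight(); // fall back to the user-editable aspect
    }
    return value;
  }

  public static void main(String[] args) {
    System.out.println(preferInfoAspect(Pair.of(null, "edited@example.com"))); // edited@example.com
    System.out.println(preferInfoAspect(Pair.of("info@example.com", null)));   // info@example.com
  }
}
```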
@@ -0,0 +1,25 @@
package com.linkedin.datahub.graphql.resolvers.type;

import com.linkedin.datahub.graphql.generated.CorpGroup;
import com.linkedin.datahub.graphql.generated.CorpUser;
import graphql.TypeResolutionEnvironment;
import graphql.schema.GraphQLObjectType;
import graphql.schema.TypeResolver;

public class ResolvedActorResolver implements TypeResolver {

public static final String CORP_USER = "CorpUser";
public static final String CORP_GROUP = "CorpGroup";

@Override
public GraphQLObjectType getType(TypeResolutionEnvironment env) {
if (env.getObject() instanceof CorpUser) {
return env.getSchema().getObjectType(CORP_USER);
} else if (env.getObject() instanceof CorpGroup) {
return env.getSchema().getObjectType(CORP_GROUP);
} else {
throw new RuntimeException(
"Unrecognized object type provided to type resolver, Type:" + env.getObject().toString());
}
}
}
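ResolvedActorResolver is the TypeResolver behind the new ResolvedActor union: it dispatches on the runtime class of whatever a data fetcher returns. A sketch of how it is registered, mirroring the .type("ResolvedActor", ...) wiring added to GmsGraphQLEngine above (graphql-java builder API, shown in isolation here):

```java
import graphql.schema.idl.RuntimeWiring;

public class ResolvedActorWiringSketch {
  // Registers the resolver so graphql-java can pick CorpUser or CorpGroup
  // as the concrete type whenever a field returns a ResolvedActor.
  public static RuntimeWiring wiring() {
    return RuntimeWiring.newRuntimeWiring()
        .type(
            "ResolvedActor",
            typeWiring -> typeWiring.typeResolver(new ResolvedActorResolver()))
        .build();
  }
}
```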
@@ -28,6 +28,7 @@ public InstitutionalMemoryMetadata apply(
result.setDescription(input.getDescription()); // deprecated field
result.setLabel(input.getDescription());
result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString()));
+    result.setActor(ResolvedActorMapper.map(input.getCreateStamp().getActor()));
result.setCreated(AuditStampMapper.map(context, input.getCreateStamp()));
result.setAssociatedUrn(entityUrn.toString());
return result;
@@ -0,0 +1,31 @@
package com.linkedin.datahub.graphql.types.common.mappers;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.CorpGroup;
import com.linkedin.datahub.graphql.generated.CorpUser;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.generated.ResolvedActor;
import com.linkedin.metadata.Constants;
import javax.annotation.Nonnull;

public class ResolvedActorMapper {

public static final ResolvedActorMapper INSTANCE = new ResolvedActorMapper();

public static ResolvedActor map(@Nonnull final Urn actorUrn) {
return INSTANCE.apply(actorUrn);
}

public ResolvedActor apply(@Nonnull final Urn actorUrn) {
if (actorUrn.getEntityType().equals(Constants.CORP_GROUP_ENTITY_NAME)) {
CorpGroup partialGroup = new CorpGroup();
partialGroup.setUrn(actorUrn.toString());
partialGroup.setType(EntityType.CORP_GROUP);
return partialGroup;
}
CorpUser partialUser = new CorpUser();
partialUser.setUrn(actorUrn.toString());
partialUser.setType(EntityType.CORP_USER);
return (ResolvedActor) partialUser;
}
}
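ResolvedActorMapper returns partial entities: only urn and type are populated (hence the partialUser/partialGroup names), leaving the remaining fields for downstream resolvers to hydrate. A short usage sketch (the URNs are illustrative):

```java
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.generated.ResolvedActor;

public class ResolvedActorMapperSketch {
  public static void main(String[] args) {
    // A corpuser URN maps to a partial CorpUser...
    ResolvedActor user = ResolvedActorMapper.map(UrnUtils.getUrn("urn:li:corpuser:jdoe"));
    // ...while a corpGroup URN maps to a partial CorpGroup.
    ResolvedActor group =
        ResolvedActorMapper.map(UrnUtils.getUrn("urn:li:corpGroup:data-platform"));
  }
}
```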