Skip to content

Commit

Permalink
feat(blame) - add schema history blame UI
Browse files Browse the repository at this point in the history
  • Loading branch information
aditya-radhakrishnan committed Apr 29, 2022
1 parent c34a1ba commit 00340fd
Show file tree
Hide file tree
Showing 27 changed files with 1,159 additions and 59 deletions.
1 change: 1 addition & 0 deletions datahub-graphql-core/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ graphqlCodegen {
"$projectDir/src/main/resources/recommendation.graphql".toString(),
"$projectDir/src/main/resources/ingestion.graphql".toString(),
"$projectDir/src/main/resources/auth.graphql".toString(),
"$projectDir/src/main/resources/timeline.graphql".toString(),
]
outputDir = new File("$projectDir/src/mainGeneratedGraphQL/java")
packageName = "com.linkedin.datahub.graphql.generated"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ public class Constants {
public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql";
public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql";
public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql";
public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql";
public static final String BROWSE_PATH_DELIMITER = "/";
public static final String VERSION_STAMP_FIELD_NAME = "versionStamp";
}
Original file line number Diff line number Diff line change
Expand Up @@ -76,10 +76,6 @@
import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver;
import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver;
import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.EntityRunsResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.DataJobRunsResolver;
import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver;
import com.linkedin.datahub.graphql.resolvers.policy.GetGrantedPrivilegesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.GetIngestionExecutionRequestResolver;
Expand All @@ -92,6 +88,8 @@
import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.UpsertIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.DataJobRunsResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.EntityRunsResolver;
import com.linkedin.datahub.graphql.resolvers.load.AspectResolver;
import com.linkedin.datahub.graphql.resolvers.load.EntityLineageResultResolver;
import com.linkedin.datahub.graphql.resolvers.load.EntityRelationshipsResultResolver;
Expand All @@ -113,6 +111,7 @@
import com.linkedin.datahub.graphql.resolvers.mutate.RemoveTermResolver;
import com.linkedin.datahub.graphql.resolvers.mutate.UpdateDescriptionResolver;
import com.linkedin.datahub.graphql.resolvers.policy.DeletePolicyResolver;
import com.linkedin.datahub.graphql.resolvers.policy.GetGrantedPrivilegesResolver;
import com.linkedin.datahub.graphql.resolvers.policy.ListPoliciesResolver;
import com.linkedin.datahub.graphql.resolvers.policy.UpsertPolicyResolver;
import com.linkedin.datahub.graphql.resolvers.recommendation.ListRecommendationsResolver;
Expand All @@ -122,6 +121,7 @@
import com.linkedin.datahub.graphql.resolvers.search.SearchAcrossLineageResolver;
import com.linkedin.datahub.graphql.resolvers.search.SearchResolver;
import com.linkedin.datahub.graphql.resolvers.tag.SetTagColorResolver;
import com.linkedin.datahub.graphql.resolvers.timeline.ListSchemaBlameResolver;
import com.linkedin.datahub.graphql.resolvers.type.AspectInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.EntityInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver;
Expand All @@ -130,8 +130,8 @@
import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.user.ListUsersResolver;
import com.linkedin.datahub.graphql.resolvers.user.RemoveUserResolver;
import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver;
import com.linkedin.datahub.graphql.types.BrowsableEntityType;
import com.linkedin.datahub.graphql.types.dataprocessinst.mappers.DataProcessInstanceRunEventMapper;
import com.linkedin.datahub.graphql.types.EntityType;
import com.linkedin.datahub.graphql.types.LoadableType;
import com.linkedin.datahub.graphql.types.SearchableEntityType;
Expand All @@ -144,10 +144,10 @@
import com.linkedin.datahub.graphql.types.corpuser.CorpUserType;
import com.linkedin.datahub.graphql.types.dashboard.DashboardType;
import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType;
import com.linkedin.datahub.graphql.types.notebook.NotebookType;
import com.linkedin.datahub.graphql.types.dataflow.DataFlowType;
import com.linkedin.datahub.graphql.types.datajob.DataJobType;
import com.linkedin.datahub.graphql.types.dataplatform.DataPlatformType;
import com.linkedin.datahub.graphql.types.dataprocessinst.mappers.DataProcessInstanceRunEventMapper;
import com.linkedin.datahub.graphql.types.dataset.DatasetType;
import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper;
import com.linkedin.datahub.graphql.types.domain.DomainType;
Expand All @@ -157,6 +157,7 @@
import com.linkedin.datahub.graphql.types.mlmodel.MLModelGroupType;
import com.linkedin.datahub.graphql.types.mlmodel.MLModelType;
import com.linkedin.datahub.graphql.types.mlmodel.MLPrimaryKeyType;
import com.linkedin.datahub.graphql.types.notebook.NotebookType;
import com.linkedin.datahub.graphql.types.tag.TagType;
import com.linkedin.datahub.graphql.types.usage.UsageType;
import com.linkedin.entity.client.EntityClient;
Expand All @@ -166,6 +167,7 @@
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.recommendation.RecommendationsService;
import com.linkedin.metadata.secret.SecretService;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import com.linkedin.metadata.version.GitVersion;
import com.linkedin.usage.UsageClient;
Expand Down Expand Up @@ -194,7 +196,7 @@

import static com.linkedin.datahub.graphql.Constants.*;
import static com.linkedin.metadata.Constants.*;
import static graphql.Scalars.GraphQLLong;
import static graphql.Scalars.*;


/**
Expand All @@ -216,6 +218,7 @@ public class GmsGraphQLEngine {
private final GitVersion gitVersion;
private final boolean supportsImpactAnalysis;
private final TimeseriesAspectService timeseriesAspectService;
private final TimelineService timelineService;

private final IngestionConfiguration ingestionConfiguration;
private final AuthenticationConfiguration authenticationConfiguration;
Expand Down Expand Up @@ -286,6 +289,7 @@ public GmsGraphQLEngine(
final AuthenticationConfiguration authenticationConfiguration,
final AuthorizationConfiguration authorizationConfiguration,
final GitVersion gitVersion,
final TimelineService timelineService,
final boolean supportsImpactAnalysis,
final VisualConfiguration visualConfiguration
) {
Expand All @@ -303,6 +307,7 @@ public GmsGraphQLEngine(
this.gitVersion = gitVersion;
this.supportsImpactAnalysis = supportsImpactAnalysis;
this.timeseriesAspectService = timeseriesAspectService;
this.timelineService = timelineService;

this.ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration);
this.authenticationConfiguration = Objects.requireNonNull(authenticationConfiguration);
Expand Down Expand Up @@ -417,6 +422,7 @@ public GraphQLEngine.Builder builder() {
.addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE))
.addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE))
.addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE))
.addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE))
.addDataLoaders(loaderSuppliers(loadableTypes))
.addDataLoader("Aspect", context -> createDataLoader(aspectType, context))
.addDataLoader("UsageQueryResult", context -> createDataLoader(usageType, context))
Expand Down Expand Up @@ -520,6 +526,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient))
.dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient))
.dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient))
.dataFetcher("listSchemaBlame", new ListSchemaBlameResolver(this.timelineService))
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import com.linkedin.datahub.graphql.generated.PoliciesConfig;
import com.linkedin.datahub.graphql.generated.Privilege;
import com.linkedin.datahub.graphql.generated.ResourcePrivileges;
import com.linkedin.datahub.graphql.generated.TimelineConfig;
import com.linkedin.datahub.graphql.generated.VisualConfiguration;
import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.metadata.version.GitVersion;
Expand Down Expand Up @@ -90,14 +91,19 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen

final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig();
ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled());
appConfig.setAuthConfig(authConfig);

final TimelineConfig timelineConfig = new TimelineConfig();
timelineConfig.setEnabled(true);

appConfig.setAnalyticsConfig(analyticsConfig);
appConfig.setPoliciesConfig(policiesConfig);
appConfig.setIdentityManagementConfig(identityManagementConfig);
appConfig.setManagedIngestionConfig(ingestionConfig);
appConfig.setAuthConfig(authConfig);
appConfig.setVisualConfig(_visualConfiguration);

appConfig.setTimelineConfig(timelineConfig);

return CompletableFuture.completedFuture(appConfig);
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
package com.linkedin.datahub.graphql.resolvers.timeline;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.ListSchemaBlameInput;
import com.linkedin.datahub.graphql.generated.ListSchemaBlameResult;
import com.linkedin.datahub.graphql.types.timeline.mappers.SchemaBlameMapper;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeline.data.ChangeCategory;
import com.linkedin.metadata.timeline.data.ChangeTransaction;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.net.URISyntaxException;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;


/**
 * GraphQL resolver backing the {@code listSchemaBlame} query: fetches the technical-schema
 * change timeline for a dataset from {@link TimelineService} and maps it into a
 * {@link ListSchemaBlameResult} via {@link SchemaBlameMapper}.
 *
 * <p>Returns a future completing with {@code null} when no dataset URN is supplied, when the
 * URN is malformed, or when the timeline lookup fails (errors are logged, not propagated).
 */
public class ListSchemaBlameResolver implements DataFetcher<CompletableFuture<ListSchemaBlameResult>> {
  // Single explicit logger; the original also carried @Slf4j, whose generated `log` field
  // was never used — the annotation was redundant and has been dropped.
  private static final Logger _logger = LoggerFactory.getLogger(ListSchemaBlameResolver.class);

  private final TimelineService _timelineService;

  public ListSchemaBlameResolver(TimelineService timelineService) {
    _timelineService = timelineService;
  }

  @Override
  public CompletableFuture<ListSchemaBlameResult> get(final DataFetchingEnvironment environment) throws Exception {
    final ListSchemaBlameInput input = bindArgument(environment.getArgument("input"), ListSchemaBlameInput.class);

    // bindArgument may yield nulls for optional fields; pass them through as-is.
    final String datasetUrnString = input.getDatasetUrn();
    final String versionCutoff = input.getVersionCutoff();
    // 0/0 appears to mean "no time-window filtering" for TimelineService.getTimeline —
    // NOTE(review): confirm against the service's contract.
    final long startTime = 0;
    final long endTime = 0;

    return CompletableFuture.supplyAsync(() -> {
      // No URN supplied: nothing to blame; resolve to null rather than erroring.
      if (datasetUrnString == null) {
        return null;
      }
      try {
        // Schema blame only cares about technical-schema changes.
        final Set<ChangeCategory> changeCategorySet = EnumSet.of(ChangeCategory.TECHNICAL_SCHEMA);
        final Urn datasetUrn = Urn.createFromString(datasetUrnString);
        final List<ChangeTransaction> changeTransactionList =
            _timelineService.getTimeline(datasetUrn, changeCategorySet, startTime, endTime, null, null, false);
        return SchemaBlameMapper.map(changeTransactionList, versionCutoff);
      } catch (URISyntaxException u) {
        // Invalid URN string; parameterized logging instead of eager String.format.
        _logger.debug("Failed to list schema blame data, likely due to the Urn {} being invalid", datasetUrnString, u);
        return null;
      } catch (Exception e) {
        // Best-effort endpoint: swallow, log, and resolve to null (preserves original contract).
        _logger.debug("Failed to list schema blame data", e);
        return null;
      }
    });
  }
}
Loading

0 comments on commit 00340fd

Please sign in to comment.