feat(blame) - add schema history blame UI
aditya-radhakrishnan committed May 2, 2022
1 parent 1afbc49 commit fe8527d
Showing 25 changed files with 1,577 additions and 367 deletions.
1 change: 1 addition & 0 deletions datahub-graphql-core/build.gradle
@@ -30,6 +30,7 @@ graphqlCodegen {
"$projectDir/src/main/resources/recommendation.graphql".toString(),
"$projectDir/src/main/resources/ingestion.graphql".toString(),
"$projectDir/src/main/resources/auth.graphql".toString(),
"$projectDir/src/main/resources/timeline.graphql".toString(),
]
outputDir = new File("$projectDir/src/mainGeneratedGraphQL/java")
packageName = "com.linkedin.datahub.graphql.generated"
@@ -15,6 +15,7 @@ public class Constants {
public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql";
public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql";
public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql";
public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql";
public static final String BROWSE_PATH_DELIMITER = "/";
public static final String VERSION_STAMP_FIELD_NAME = "versionStamp";
}
@@ -76,10 +76,6 @@
import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver;
import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver;
import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.EntityRunsResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.DataJobRunsResolver;
import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver;
import com.linkedin.datahub.graphql.resolvers.policy.GetGrantedPrivilegesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.execution.GetIngestionExecutionRequestResolver;
@@ -92,6 +88,8 @@
import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver;
import com.linkedin.datahub.graphql.resolvers.ingest.source.UpsertIngestionSourceResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.DataJobRunsResolver;
import com.linkedin.datahub.graphql.resolvers.jobs.EntityRunsResolver;
import com.linkedin.datahub.graphql.resolvers.load.AspectResolver;
import com.linkedin.datahub.graphql.resolvers.load.EntityLineageResultResolver;
import com.linkedin.datahub.graphql.resolvers.load.EntityRelationshipsResultResolver;
@@ -113,6 +111,7 @@
import com.linkedin.datahub.graphql.resolvers.mutate.RemoveTermResolver;
import com.linkedin.datahub.graphql.resolvers.mutate.UpdateDescriptionResolver;
import com.linkedin.datahub.graphql.resolvers.policy.DeletePolicyResolver;
import com.linkedin.datahub.graphql.resolvers.policy.GetGrantedPrivilegesResolver;
import com.linkedin.datahub.graphql.resolvers.policy.ListPoliciesResolver;
import com.linkedin.datahub.graphql.resolvers.policy.UpsertPolicyResolver;
import com.linkedin.datahub.graphql.resolvers.recommendation.ListRecommendationsResolver;
@@ -122,6 +121,7 @@
import com.linkedin.datahub.graphql.resolvers.search.SearchAcrossLineageResolver;
import com.linkedin.datahub.graphql.resolvers.search.SearchResolver;
import com.linkedin.datahub.graphql.resolvers.tag.SetTagColorResolver;
import com.linkedin.datahub.graphql.resolvers.timeline.GetSchemaBlameResolver;
import com.linkedin.datahub.graphql.resolvers.type.AspectInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.EntityInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver;
@@ -130,8 +130,8 @@
import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.user.ListUsersResolver;
import com.linkedin.datahub.graphql.resolvers.user.RemoveUserResolver;
import com.linkedin.datahub.graphql.resolvers.user.UpdateUserStatusResolver;
import com.linkedin.datahub.graphql.types.BrowsableEntityType;
import com.linkedin.datahub.graphql.types.dataprocessinst.mappers.DataProcessInstanceRunEventMapper;
import com.linkedin.datahub.graphql.types.EntityType;
import com.linkedin.datahub.graphql.types.LoadableType;
import com.linkedin.datahub.graphql.types.SearchableEntityType;
@@ -144,10 +144,10 @@
import com.linkedin.datahub.graphql.types.corpuser.CorpUserType;
import com.linkedin.datahub.graphql.types.dashboard.DashboardType;
import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType;
import com.linkedin.datahub.graphql.types.notebook.NotebookType;
import com.linkedin.datahub.graphql.types.dataflow.DataFlowType;
import com.linkedin.datahub.graphql.types.datajob.DataJobType;
import com.linkedin.datahub.graphql.types.dataplatform.DataPlatformType;
import com.linkedin.datahub.graphql.types.dataprocessinst.mappers.DataProcessInstanceRunEventMapper;
import com.linkedin.datahub.graphql.types.dataset.DatasetType;
import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper;
import com.linkedin.datahub.graphql.types.domain.DomainType;
@@ -157,6 +157,7 @@
import com.linkedin.datahub.graphql.types.mlmodel.MLModelGroupType;
import com.linkedin.datahub.graphql.types.mlmodel.MLModelType;
import com.linkedin.datahub.graphql.types.mlmodel.MLPrimaryKeyType;
import com.linkedin.datahub.graphql.types.notebook.NotebookType;
import com.linkedin.datahub.graphql.types.tag.TagType;
import com.linkedin.datahub.graphql.types.usage.UsageType;
import com.linkedin.entity.client.EntityClient;
@@ -167,6 +168,7 @@
import com.linkedin.metadata.recommendation.RecommendationsService;
import com.linkedin.metadata.secret.SecretService;
import com.linkedin.metadata.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import com.linkedin.metadata.version.GitVersion;
import com.linkedin.usage.UsageClient;
@@ -195,7 +197,7 @@

import static com.linkedin.datahub.graphql.Constants.*;
import static com.linkedin.metadata.Constants.*;
import static graphql.Scalars.GraphQLLong;
import static graphql.Scalars.*;


/**
@@ -217,6 +219,7 @@ public class GmsGraphQLEngine {
private final GitVersion gitVersion;
private final boolean supportsImpactAnalysis;
private final TimeseriesAspectService timeseriesAspectService;
private final TimelineService timelineService;

private final IngestionConfiguration ingestionConfiguration;
private final AuthenticationConfiguration authenticationConfiguration;
@@ -288,6 +291,7 @@ public GmsGraphQLEngine(
final AuthenticationConfiguration authenticationConfiguration,
final AuthorizationConfiguration authorizationConfiguration,
final GitVersion gitVersion,
final TimelineService timelineService,
final boolean supportsImpactAnalysis,
final VisualConfiguration visualConfiguration,
final TelemetryConfiguration telemetryConfiguration
@@ -306,6 +310,7 @@
this.gitVersion = gitVersion;
this.supportsImpactAnalysis = supportsImpactAnalysis;
this.timeseriesAspectService = timeseriesAspectService;
this.timelineService = timelineService;

this.ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration);
this.authenticationConfiguration = Objects.requireNonNull(authenticationConfiguration);
@@ -421,6 +426,7 @@ public GraphQLEngine.Builder builder() {
.addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE))
.addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE))
.addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE))
.addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE))
.addDataLoaders(loaderSuppliers(loadableTypes))
.addDataLoader("Aspect", context -> createDataLoader(aspectType, context))
.addDataLoader("UsageQueryResult", context -> createDataLoader(usageType, context))
@@ -524,6 +530,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient))
.dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient))
.dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient))
.dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService))
);
}

@@ -95,7 +95,7 @@ public CompletableFuture<AppConfig> get(final DataFetchingEnvironment environmen

final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig();
ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled());
appConfig.setAuthConfig(authConfig);

appConfig.setAnalyticsConfig(analyticsConfig);
appConfig.setPoliciesConfig(policiesConfig);
appConfig.setIdentityManagementConfig(identityManagementConfig);
@@ -0,0 +1,61 @@
package com.linkedin.datahub.graphql.resolvers.timeline;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.GetSchemaBlameInput;
import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult;
import com.linkedin.datahub.graphql.types.timeline.mappers.SchemaFieldBlameMapper;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeline.data.ChangeCategory;
import com.linkedin.metadata.timeline.data.ChangeTransaction;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.net.URISyntaxException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import lombok.extern.slf4j.Slf4j;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;


@Slf4j
public class GetSchemaBlameResolver implements DataFetcher<CompletableFuture<GetSchemaBlameResult>> {
private final TimelineService _timelineService;

public GetSchemaBlameResolver(TimelineService timelineService) {
_timelineService = timelineService;
}

@Override
public CompletableFuture<GetSchemaBlameResult> get(final DataFetchingEnvironment environment) throws Exception {
final GetSchemaBlameInput input = bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class);

final String datasetUrnString = input.getDatasetUrn() == null ? null : input.getDatasetUrn();
final long startTime = 0;
final long endTime = 0;
final String version = input.getVersion() == null ? null : input.getVersion();

return CompletableFuture.supplyAsync(() -> {
try {
if (datasetUrnString == null) {
return null;
}
final Set<ChangeCategory> changeCategorySet = new HashSet<>();
changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA);
Urn datasetUrn = Urn.createFromString(datasetUrnString);
List<ChangeTransaction> changeTransactionList =
_timelineService.getTimeline(datasetUrn, changeCategorySet, startTime, endTime, null, null, false);
return SchemaFieldBlameMapper.map(changeTransactionList, version);
} catch (URISyntaxException u) {
log.debug(
String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString),
u);
return null;
} catch (Exception e) {
log.debug("Failed to list schema blame data", e);
return null;
}
});
}
}
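
For reference, once timeline.graphql is loaded and the resolver above is registered under "getSchemaBlame", a client query against the new field could look roughly like the sketch below. This is a minimal, hypothetical example: the input field names (datasetUrn, version) mirror the GetSchemaBlameInput accessors the resolver reads, but the selected result fields are placeholders, since the GetSchemaBlameResult definition in timeline.graphql is not shown in this diff.

# Hypothetical GraphQL query for the new getSchemaBlame field.
# Input fields follow the resolver's GetSchemaBlameInput accessors;
# the result selection below is illustrative, not the actual schema.
query {
  getSchemaBlame(
    input: {
      datasetUrn: "urn:li:dataset:(urn:li:dataPlatform:hive,SampleTable,PROD)"  # example dataset URN
      version: "0.0.1"  # optional; the resolver passes it through to SchemaFieldBlameMapper.map
    }
  ) {
    version                  # hypothetical result field
    schemaFieldBlameList {   # hypothetical result field
      fieldPath              # hypothetical result field
    }
  }
}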