Skip to content

Commit

Permalink
Merge branch 'datahub-project:master' into feature/oracle_data_dictionary_mode
Browse files Browse the repository at this point in the history
  • Loading branch information
sleeperdeep authored Dec 5, 2023
2 parents 6ab6474 + 0d9aa26 commit 54385be
Show file tree
Hide file tree
Showing 317 changed files with 12,055 additions and 11,789 deletions.
11 changes: 10 additions & 1 deletion .github/actions/docker-custom-build-and-push/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -70,11 +70,20 @@ runs:
push: false
cache-from: type=registry,ref=${{ steps.docker_meta.outputs.tags }}
cache-to: type=inline
- name: Single Tag
if: ${{ inputs.publish != 'true' }}
shell: bash
run: |
TAGS="""
${{ steps.docker_meta.outputs.tags }}
"""
echo "SINGLE_TAG=$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT
id: single_tag
- name: Upload image locally for testing (if not publishing)
uses: ishworkh/docker-image-artifact-upload@v1
if: ${{ inputs.publish != 'true' }}
with:
image: ${{ steps.docker_meta.outputs.tags }}
image: ${{ steps.single_tag.outputs.SINGLE_TAG }}

# Code for building multi-platform images and pushing to Docker Hub.
- name: Set up QEMU
Expand Down
2 changes: 2 additions & 0 deletions .github/workflows/docker-unified.yml
Original file line number Diff line number Diff line change
Expand Up @@ -876,11 +876,13 @@ jobs:
]
steps:
- uses: aws-actions/configure-aws-credentials@v1
if: ${{ needs.setup.outputs.publish != 'false' }}
with:
aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }}
aws-region: us-west-2
- uses: isbang/[email protected]
if: ${{ needs.setup.outputs.publish != 'false' }}
with:
sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }}
message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}'
13 changes: 13 additions & 0 deletions .github/workflows/metadata-model.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,19 @@ jobs:
run: ./metadata-ingestion/scripts/install_deps.sh
- name: Run model generation
run: ./gradlew :metadata-models:build
- name: Generate metadata files
if: ${{ needs.setup.outputs.publish == 'true' }}
run: ./gradlew :metadata-ingestion:modelDocGen
- name: Configure AWS Credentials
if: ${{ needs.setup.outputs.publish == 'true' }}
uses: aws-actions/configure-aws-credentials@v3
with:
aws-access-key-id: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY }}
aws-region: us-west-2
- name: Upload metadata to S3
if: ${{ needs.setup.outputs.publish == 'true' }}
run: aws s3 cp ./metadata-ingestion/generated/docs/metadata_model_mces.json s3://${{ secrets.ACRYL_CI_ARTIFACTS_BUCKET }}/datahub/demo/metadata/
- name: Upload metadata to DataHub
if: ${{ needs.setup.outputs.publish == 'true' }}
env:
Expand Down
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -167,6 +167,7 @@ project.ext.externalDependency = [
'parquetHadoop': 'org.apache.parquet:parquet-hadoop:1.13.1',
'picocli': 'info.picocli:picocli:4.5.0',
'playCache': "com.typesafe.play:play-cache_2.12:$playVersion",
'playCaffeineCache': "com.typesafe.play:play-caffeine-cache_2.12:$playVersion",
'playWs': 'com.typesafe.play:play-ahc-ws-standalone_2.12:2.1.10',
'playDocs': "com.typesafe.play:play-docs_2.12:$playVersion",
'playGuice': "com.typesafe.play:play-guice_2.12:$playVersion",
Expand Down
18 changes: 17 additions & 1 deletion datahub-frontend/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,22 @@ docker {
buildx(true)
load(true)
push(false)

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
if (project.hasProperty('alpineApkRepositoryUrl')) {
dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl')
}
if (project.hasProperty('githubMirrorUrl')) {
dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl')
}
if (project.hasProperty('mavenCentralRepositoryUrl')) {
dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl')
}

if (dockerBuildArgs.size() > 0) {
buildArgs(dockerBuildArgs)
}
}

task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) {
Expand All @@ -104,4 +120,4 @@ task cleanLocalDockerImages {
rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}")
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)
dockerClean.finalizedBy(cleanLocalDockerImages)
1 change: 1 addition & 0 deletions datahub-frontend/play.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ dependencies {
implementation externalDependency.shiroCore

implementation externalDependency.playCache
implementation externalDependency.playCaffeineCache
implementation externalDependency.playWs
implementation externalDependency.playServer
implementation externalDependency.playAkkaHttpServer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1174,10 +1174,6 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("testResults", new TestResultsResolver(entityClient))
.dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))
.dataFetcher("exists", new EntityExistsResolver(entityService))
.dataFetcher("subTypes", new SubTypesResolver(
this.entityClient,
"dataset",
"subTypes"))
.dataFetcher("runs", new EntityRunsResolver(entityClient))
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
.dataFetcher("parentContainers", new ParentContainersResolver(entityClient)))
Expand Down Expand Up @@ -1433,10 +1429,6 @@ private void configureChartResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService))
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
.dataFetcher("exists", new EntityExistsResolver(entityService))
.dataFetcher("subTypes", new SubTypesResolver(
this.entityClient,
"chart",
"subTypes"))
);
builder.type("ChartInfo", typeWiring -> typeWiring
.dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType,
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,8 @@ public class ChartType implements SearchableEntityType<Chart, String>, Browsable
INPUT_FIELDS_ASPECT_NAME,
EMBED_ASPECT_NAME,
DATA_PRODUCTS_ASPECT_NAME,
BROWSE_PATHS_V2_ASPECT_NAME
BROWSE_PATHS_V2_ASPECT_NAME,
SUB_TYPES_ASPECT_NAME
);
private static final Set<String> FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import com.linkedin.common.InstitutionalMemory;
import com.linkedin.common.Ownership;
import com.linkedin.common.Status;
import com.linkedin.common.SubTypes;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.DataMap;
import com.linkedin.datahub.graphql.generated.AccessLevel;
Expand All @@ -34,6 +35,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper;
Expand Down Expand Up @@ -97,6 +99,8 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) {
chart.setEmbed(EmbedMapper.map(new Embed(dataMap))));
mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) ->
chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap))));
mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap))));
return mappingHelper.getResult();
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
package com.linkedin.datahub.graphql.types.common.mappers;

import com.linkedin.common.SubTypes;
import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
import java.util.ArrayList;
import javax.annotation.Nonnull;

/**
 * Maps the GMS {@link SubTypes} aspect to its GraphQL-generated counterpart.
 *
 * <p>Stateless and thread-safe; exposed as a singleton via {@link #INSTANCE} with a
 * static {@link #map(SubTypes)} convenience entry point, matching the convention of the
 * other mappers in this package.
 */
public class SubTypesMapper implements ModelMapper<SubTypes, com.linkedin.datahub.graphql.generated.SubTypes> {

  public static final SubTypesMapper INSTANCE = new SubTypesMapper();

  /**
   * Static convenience wrapper around {@link #apply(SubTypes)}.
   *
   * @param metadata the GMS sub-types aspect to convert; must not be null
   * @return the equivalent GraphQL-generated {@code SubTypes}
   */
  public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) {
    return INSTANCE.apply(metadata);
  }

  @Override
  public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) {
    // Defensively copy the type-name list so mutations of the GraphQL object
    // cannot leak back into the underlying GMS record.
    final ArrayList<String> copiedTypeNames = new ArrayList<>(input.getTypeNames());
    final com.linkedin.datahub.graphql.generated.SubTypes mapped =
        new com.linkedin.datahub.graphql.generated.SubTypes();
    mapped.setTypeNames(copiedTypeNames);
    return mapped;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper;
import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper;
Expand Down Expand Up @@ -97,7 +98,7 @@ public static Container map(final EntityResponse entityResponse) {

final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME);
if (envelopedSubTypes != null) {
result.setSubTypes(mapSubTypes(new SubTypes(envelopedSubTypes.getValue().data())));
result.setSubTypes(SubTypesMapper.map(new SubTypes(envelopedSubTypes.getValue().data())));
}

final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME);
Expand Down Expand Up @@ -150,12 +151,6 @@ private static com.linkedin.datahub.graphql.generated.ContainerEditablePropertie
return editableContainerProperties;
}

private static com.linkedin.datahub.graphql.generated.SubTypes mapSubTypes(final SubTypes gmsSubTypes) {
final com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes();
subTypes.setTypeNames(gmsSubTypes.getTypeNames());
return subTypes;
}

private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) {
// Set dummy platform to be resolved.
final DataPlatform dummyPlatform = new DataPlatform();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper;
Expand Down Expand Up @@ -91,7 +92,8 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) {
dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))));
mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn)));
mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes);
mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap))));
mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap))));
mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) ->
Expand Down Expand Up @@ -204,13 +206,4 @@ private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap)
final Domains domains = new Domains(dataMap);
dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn()));
}

private void mapSubTypes(@Nonnull Dashboard dashboard, DataMap dataMap) {
SubTypes pegasusSubTypes = new SubTypes(dataMap);
if (pegasusSubTypes.hasTypeNames()) {
com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes();
subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList()));
dashboard.setSubTypes(subTypes);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,8 @@ public class DataJobType implements SearchableEntityType<DataJob, String>, Brows
DEPRECATION_ASPECT_NAME,
DATA_PLATFORM_INSTANCE_ASPECT_NAME,
DATA_PRODUCTS_ASPECT_NAME,
BROWSE_PATHS_V2_ASPECT_NAME
BROWSE_PATHS_V2_ASPECT_NAME,
SUB_TYPES_ASPECT_NAME
);
private static final Set<String> FACET_FIELDS = ImmutableSet.of("flow");
private final EntityClient _entityClient;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import com.linkedin.common.InstitutionalMemory;
import com.linkedin.common.Ownership;
import com.linkedin.common.Status;
import com.linkedin.common.SubTypes;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.DataMap;
import com.linkedin.datahub.graphql.generated.DataFlow;
Expand All @@ -27,6 +28,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper;
import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper;
Expand Down Expand Up @@ -103,6 +105,8 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) {
result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data)));
} else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) {
result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data)));
} else if (SUB_TYPES_ASPECT_NAME.equals(name)) {
result.setSubTypes(SubTypesMapper.map(new SubTypes(data)));
}
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,8 @@ public class DatasetType implements SearchableEntityType<Dataset, String>, Brows
EMBED_ASPECT_NAME,
DATA_PRODUCTS_ASPECT_NAME,
BROWSE_PATHS_V2_ASPECT_NAME,
ACCESS_DATASET_ASPECT_NAME
ACCESS_DATASET_ASPECT_NAME,
SUB_TYPES_ASPECT_NAME
);

private static final Set<String> FACET_FIELDS = ImmutableSet.of("origin", "platform");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import com.linkedin.common.Ownership;
import com.linkedin.common.Siblings;
import com.linkedin.common.Status;
import com.linkedin.common.SubTypes;
import com.linkedin.common.TimeStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.DataMap;
Expand All @@ -29,6 +30,7 @@
import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper;
import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper;
import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper;
import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper;
import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils;
Expand Down Expand Up @@ -114,6 +116,8 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) {
dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap))));
mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) ->
dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn))));
mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) ->
dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap))));
return mappingHelper.getResult();
}

Expand Down
5 changes: 5 additions & 0 deletions datahub-graphql-core/src/main/resources/entity.graphql
Original file line number Diff line number Diff line change
Expand Up @@ -5689,6 +5689,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity {
"""
type: EntityType!

"""
Sub Types that this entity implements
"""
subTypes: SubTypes

"""
The timestamp for the last time this entity was ingested
"""
Expand Down
16 changes: 16 additions & 0 deletions datahub-upgrade/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,22 @@ docker {
buildx(true)
load(true)
push(false)

// Add build args if they are defined (needed for some CI or enterprise environments)
def dockerBuildArgs = [:]
if (project.hasProperty('alpineApkRepositoryUrl')) {
dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl')
}
if (project.hasProperty('githubMirrorUrl')) {
dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl')
}
if (project.hasProperty('mavenCentralRepositoryUrl')) {
dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl')
}

if (dockerBuildArgs.size() > 0) {
buildArgs(dockerBuildArgs)
}
}
tasks.getByPath(":datahub-upgrade:docker").dependsOn([bootJar])

Expand Down
Loading

0 comments on commit 54385be

Please sign in to comment.